Merge branch 'main' into redsun82/codegen-rename-dbscheme

Paolo Tranquilli
2025-03-27 15:12:16 +01:00
238 changed files with 15087 additions and 1702 deletions

View File

@@ -17,7 +17,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, macos-13, windows-2019]
os: [ubuntu-22.04, macos-13, windows-2019]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4

.github/workflows/go-tests-rtjo.yml vendored Normal file
View File

@@ -0,0 +1,22 @@
name: "Go: Run RTJO Tests"
on:
pull_request:
types:
- labeled
permissions:
contents: read
jobs:
test-linux:
if: "github.repository_owner == 'github' && github.event.label.name == 'Run: RTJO Language Tests'"
name: RTJO Test Linux (Ubuntu)
runs-on: ubuntu-latest-xl
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Run tests
uses: ./go/actions/test
with:
run-code-checks: true
dynamic-join-order-mode: all

.github/workflows/ruby-qltest-rtjo.yml vendored Normal file
View File

@@ -0,0 +1,40 @@
name: "Ruby: Run RTJO Language Tests"
on:
pull_request:
types:
- opened
- synchronize
- reopened
- labeled
env:
CARGO_TERM_COLOR: always
defaults:
run:
working-directory: ruby
permissions:
contents: read
jobs:
qltest-rtjo:
if: "github.repository_owner == 'github' && github.event.label.name == 'Run: RTJO Language Tests'"
runs-on: ubuntu-latest-xl
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/fetch-codeql
- uses: ./ruby/actions/create-extractor-pack
- name: Cache compilation cache
id: query-cache
uses: ./.github/actions/cache-query-compilation
with:
key: ruby-qltest
- name: Run QL tests
run: |
codeql test run --dynamic-join-order-mode=all --threads=0 --ram 50000 --search-path "${{ github.workspace }}" --check-databases --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -0,0 +1,5 @@
---
category: fix
---
* The query `actions/code-injection/medium` now produces alerts for injection
vulnerabilities on `pull_request` events.

View File

@@ -30,6 +30,9 @@ extensions:
- ["pull_request_review_comment", "github.event.review"]
- ["pull_request_review_comment", "github.head_ref"]
- ["pull_request_review_comment", "github.event.changes"]
- ["pull_request", "github.event.pull_request"]
- ["pull_request", "github.head_ref"]
- ["pull_request", "github.event.changes"]
- ["pull_request_target", "github.event.pull_request"]
- ["pull_request_target", "github.head_ref"]
- ["pull_request_target", "github.event.changes"]

View File

@@ -12,6 +12,7 @@ extensions:
- ["pull_request_comment"]
- ["pull_request_review"]
- ["pull_request_review_comment"]
- ["pull_request"]
- ["pull_request_target"]
- ["workflow_run"] # depending on branch filter
- ["workflow_call"] # depending on caller

View File

@@ -400,6 +400,7 @@ nodes
| .github/workflows/level0.yml:44:20:44:49 | github.event.issue.body | semmle.label | github.event.issue.body |
| .github/workflows/level0.yml:69:35:69:66 | github.event.comment.body | semmle.label | github.event.comment.body |
| .github/workflows/level1.yml:37:38:37:81 | github.event.workflow_run.head_branch | semmle.label | github.event.workflow_run.head_branch |
| .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | semmle.label | github.event.pull_request.body |
| .github/workflows/pull_request_review.yml:7:19:7:56 | github.event.pull_request.title | semmle.label | github.event.pull_request.title |
| .github/workflows/pull_request_review.yml:8:19:8:55 | github.event.pull_request.body | semmle.label | github.event.pull_request.body |
| .github/workflows/pull_request_review.yml:9:19:9:61 | github.event.pull_request.head.label | semmle.label | github.event.pull_request.head.label |
@@ -629,6 +630,7 @@ nodes
| .github/workflows/test19.yml:124:9:129:6 | Run Step: title3 [title] | semmle.label | Run Step: title3 [title] |
| .github/workflows/test19.yml:125:14:128:50 | TITLE=$(gh issue view "$ISSUE_NUMBER" --json title,author)\nTITLE=$(echo $TITLE \| jq -r '.title')\necho "title=$TITLE" >> "$GITHUB_OUTPUT"\n | semmle.label | TITLE=$(gh issue view "$ISSUE_NUMBER" --json title,author)\nTITLE=$(echo $TITLE \| jq -r '.title')\necho "title=$TITLE" >> "$GITHUB_OUTPUT"\n |
| .github/workflows/test19.yml:129:21:129:52 | steps.title3.outputs.title | semmle.label | steps.title3.outputs.title |
| .github/workflows/test20.yml:15:54:15:94 | github.event.pull_request.head.ref | semmle.label | github.event.pull_request.head.ref |
| .github/workflows/test21.yml:22:35:22:73 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |
| .github/workflows/test21.yml:23:36:23:74 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |
| .github/workflows/test21.yml:24:50:24:88 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |

View File

@@ -400,6 +400,7 @@ nodes
| .github/workflows/level0.yml:44:20:44:49 | github.event.issue.body | semmle.label | github.event.issue.body |
| .github/workflows/level0.yml:69:35:69:66 | github.event.comment.body | semmle.label | github.event.comment.body |
| .github/workflows/level1.yml:37:38:37:81 | github.event.workflow_run.head_branch | semmle.label | github.event.workflow_run.head_branch |
| .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | semmle.label | github.event.pull_request.body |
| .github/workflows/pull_request_review.yml:7:19:7:56 | github.event.pull_request.title | semmle.label | github.event.pull_request.title |
| .github/workflows/pull_request_review.yml:8:19:8:55 | github.event.pull_request.body | semmle.label | github.event.pull_request.body |
| .github/workflows/pull_request_review.yml:9:19:9:61 | github.event.pull_request.head.label | semmle.label | github.event.pull_request.head.label |
@@ -629,6 +630,7 @@ nodes
| .github/workflows/test19.yml:124:9:129:6 | Run Step: title3 [title] | semmle.label | Run Step: title3 [title] |
| .github/workflows/test19.yml:125:14:128:50 | TITLE=$(gh issue view "$ISSUE_NUMBER" --json title,author)\nTITLE=$(echo $TITLE \| jq -r '.title')\necho "title=$TITLE" >> "$GITHUB_OUTPUT"\n | semmle.label | TITLE=$(gh issue view "$ISSUE_NUMBER" --json title,author)\nTITLE=$(echo $TITLE \| jq -r '.title')\necho "title=$TITLE" >> "$GITHUB_OUTPUT"\n |
| .github/workflows/test19.yml:129:21:129:52 | steps.title3.outputs.title | semmle.label | steps.title3.outputs.title |
| .github/workflows/test20.yml:15:54:15:94 | github.event.pull_request.head.ref | semmle.label | github.event.pull_request.head.ref |
| .github/workflows/test21.yml:22:35:22:73 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |
| .github/workflows/test21.yml:23:36:23:74 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |
| .github/workflows/test21.yml:24:50:24:88 | github.event.head_commit.message | semmle.label | github.event.head_commit.message |
@@ -706,6 +708,7 @@ subpaths
| .github/workflows/inter-job2.yml:45:20:45:53 | needs.job1.outputs.job_output | .github/workflows/inter-job2.yml:22:9:26:6 | Uses Step: source | .github/workflows/inter-job2.yml:45:20:45:53 | needs.job1.outputs.job_output | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/inter-job2.yml:45:20:45:53 | needs.job1.outputs.job_output | ${{needs.job1.outputs.job_output}} |
| .github/workflows/inter-job4.yml:44:20:44:53 | needs.job1.outputs.job_output | .github/workflows/inter-job4.yml:22:9:26:6 | Uses Step: source | .github/workflows/inter-job4.yml:44:20:44:53 | needs.job1.outputs.job_output | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/inter-job4.yml:44:20:44:53 | needs.job1.outputs.job_output | ${{needs.job1.outputs.job_output}} |
| .github/workflows/inter-job5.yml:45:20:45:53 | needs.job1.outputs.job_output | .github/workflows/inter-job5.yml:45:20:45:53 | needs.job1.outputs.job_output | .github/workflows/inter-job5.yml:45:20:45:53 | needs.job1.outputs.job_output | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/inter-job5.yml:45:20:45:53 | needs.job1.outputs.job_output | ${{needs.job1.outputs.job_output}} |
| .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/priv_pull_request.yml:14:21:14:57 | github.event.pull_request.body | ${{ github.event.pull_request.body }} |
| .github/workflows/push.yml:7:19:7:57 | github.event.commits[11].message | .github/workflows/push.yml:7:19:7:57 | github.event.commits[11].message | .github/workflows/push.yml:7:19:7:57 | github.event.commits[11].message | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/push.yml:7:19:7:57 | github.event.commits[11].message | ${{ github.event.commits[11].message }} |
| .github/workflows/push.yml:8:19:8:62 | github.event.commits[11].author.email | .github/workflows/push.yml:8:19:8:62 | github.event.commits[11].author.email | .github/workflows/push.yml:8:19:8:62 | github.event.commits[11].author.email | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/push.yml:8:19:8:62 | github.event.commits[11].author.email | ${{ github.event.commits[11].author.email }} |
| .github/workflows/push.yml:9:19:9:61 | github.event.commits[11].author.name | .github/workflows/push.yml:9:19:9:61 | github.event.commits[11].author.name | .github/workflows/push.yml:9:19:9:61 | github.event.commits[11].author.name | Potential code injection in $@, which may be controlled by an external user. | .github/workflows/push.yml:9:19:9:61 | github.event.commits[11].author.name | ${{ github.event.commits[11].author.name }} |

View File

@@ -1,14 +1,22 @@
using System;
using System.Diagnostics;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography.X509Certificates;
using Semmle.Util;
using Semmle.Util.Logging;
using Newtonsoft.Json;
namespace Semmle.Extraction.CSharp.DependencyFetching
{
public class DependabotProxy : IDisposable
{
/// <summary>
/// Represents configurations for package registries.
/// </summary>
/// <param name="Type">The type of package registry.</param>
/// <param name="URL">The URL of the package registry.</param>
public record class RegistryConfig(string Type, string URL);
private readonly string host;
private readonly string port;
@@ -17,6 +25,10 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
/// </summary>
internal string Address { get; }
/// <summary>
/// The URLs of package registries that are configured for the proxy.
/// </summary>
internal HashSet<string> RegistryURLs { get; }
/// <summary>
/// The path to the temporary file where the certificate is stored.
/// </summary>
internal string? CertificatePath { get; private set; }
@@ -67,6 +79,39 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
result.Certificate = X509Certificate2.CreateFromPem(cert);
}
// Try to obtain the list of private registry URLs.
var registryURLs = Environment.GetEnvironmentVariable(EnvironmentVariableNames.ProxyURLs);
if (!string.IsNullOrWhiteSpace(registryURLs))
{
try
{
// The value of the environment variable should be a JSON array of objects, such as:
// [ { "type": "nuget_feed", "url": "https://nuget.pkg.github.com/org/index.json" } ]
var array = JsonConvert.DeserializeObject<List<RegistryConfig>>(registryURLs);
if (array is not null)
{
foreach (RegistryConfig config in array)
{
// The array contains all configured private registries, not just ones for C#.
// We ignore the non-C# ones here.
if (!config.Type.Equals("nuget_feed"))
{
logger.LogDebug($"Ignoring registry at '{config.URL}' since it is not of type 'nuget_feed'.");
continue;
}
logger.LogInfo($"Found private registry at '{config.URL}'");
result.RegistryURLs.Add(config.URL);
}
}
}
catch (JsonException ex)
{
logger.LogError($"Unable to parse '{EnvironmentVariableNames.ProxyURLs}': {ex.Message}");
}
}
return result;
}
@@ -75,6 +120,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
this.host = host;
this.port = port;
this.Address = $"http://{this.host}:{this.port}";
this.RegistryURLs = new HashSet<string>();
}
public void Dispose()
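
A minimal, self-contained sketch of the parsing behaviour described in the comments above: the CODEQL_PROXY_URLS value is a JSON array of {type, url} objects, and only entries of type "nuget_feed" are kept. The class name, sample JSON value, and Main entry point are illustrative assumptions, not part of this commit; the RegistryConfig shape mirrors the record defined in this file.
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
public static class ProxyUrlsSketch
{
// Mirrors the RegistryConfig record defined in DependabotProxy.
public record class RegistryConfig(string Type, string URL);
public static void Main()
{
// Hypothetical CODEQL_PROXY_URLS value; only "nuget_feed" entries matter to the C# extractor.
var json = """[ { "type": "nuget_feed", "url": "https://nuget.pkg.github.com/org/index.json" }, { "type": "maven_repository", "url": "https://maven.example.com/v2" } ]""";
var registries = JsonConvert.DeserializeObject<List<RegistryConfig>>(json) ?? new();
var registryUrls = new HashSet<string>();
foreach (var config in registries)
{
if (config.Type.Equals("nuget_feed"))
registryUrls.Add(config.URL);
}
Console.WriteLine(string.Join(", ", registryUrls));
}
}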

View File

@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Newtonsoft.Json.Linq;
using Semmle.Util;
@@ -77,6 +76,11 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
args += " /p:EnableWindowsTargeting=true";
}
if (restoreSettings.ExtraArgs is not null)
{
args += $" {restoreSettings.ExtraArgs}";
}
return args;
}

View File

@@ -89,5 +89,10 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
/// Contains the certificate used by the Dependabot proxy.
/// </summary>
public const string ProxyCertificate = "CODEQL_PROXY_CA_CERTIFICATE";
/// <summary>
/// Contains the URLs of private nuget registries as a JSON array.
/// </summary>
public const string ProxyURLs = "CODEQL_PROXY_URLS";
}
}

View File

@@ -17,7 +17,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
IList<string> GetNugetFeedsFromFolder(string folderPath);
}
public record class RestoreSettings(string File, string PackageDirectory, bool ForceDotnetRefAssemblyFetching, string? PathToNugetConfig = null, bool ForceReevaluation = false, bool TargetWindows = false);
public record class RestoreSettings(string File, string PackageDirectory, bool ForceDotnetRefAssemblyFetching, string? ExtraArgs = null, string? PathToNugetConfig = null, bool ForceReevaluation = false, bool TargetWindows = false);
public partial record class RestoreResult(bool Success, IList<string> Output)
{

View File

@@ -103,13 +103,14 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
compilationInfoContainer.CompilationInfos.Add(("NuGet feed responsiveness checked", checkNugetFeedResponsiveness ? "1" : "0"));
HashSet<string>? explicitFeeds = null;
HashSet<string>? allFeeds = null;
try
{
if (checkNugetFeedResponsiveness && !CheckFeeds(out explicitFeeds))
if (checkNugetFeedResponsiveness && !CheckFeeds(out explicitFeeds, out allFeeds))
{
// todo: we could also check the reachability of the inherited nuget feeds, but to use those in the fallback we would need to handle authentication too.
var unresponsiveMissingPackageLocation = DownloadMissingPackagesFromSpecificFeeds(explicitFeeds);
var unresponsiveMissingPackageLocation = DownloadMissingPackagesFromSpecificFeeds([], explicitFeeds);
return unresponsiveMissingPackageLocation is null
? []
: [unresponsiveMissingPackageLocation];
@@ -156,7 +157,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
var restoredProjects = RestoreSolutions(out var container);
var projects = fileProvider.Projects.Except(restoredProjects);
RestoreProjects(projects, out var containers);
RestoreProjects(projects, allFeeds, out var containers);
var dependencies = containers.Flatten(container);
@@ -166,11 +167,11 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
.ToList();
assemblyLookupLocations.UnionWith(paths.Select(p => new AssemblyLookupLocation(p)));
LogAllUnusedPackages(dependencies);
var usedPackageNames = GetAllUsedPackageDirNames(dependencies);
var missingPackageLocation = checkNugetFeedResponsiveness
? DownloadMissingPackagesFromSpecificFeeds(explicitFeeds)
: DownloadMissingPackages();
? DownloadMissingPackagesFromSpecificFeeds(usedPackageNames, explicitFeeds)
: DownloadMissingPackages(usedPackageNames);
if (missingPackageLocation is not null)
{
@@ -260,8 +261,33 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
/// Populates dependencies with the relative paths to the assets files generated by the restore.
/// </summary>
/// <param name="projects">A list of paths to project files.</param>
private void RestoreProjects(IEnumerable<string> projects, out ConcurrentBag<DependencyContainer> dependencies)
private void RestoreProjects(IEnumerable<string> projects, HashSet<string>? configuredSources, out ConcurrentBag<DependencyContainer> dependencies)
{
// Conservatively, we only set this to a non-null value if a Dependabot proxy is enabled.
// This ensures that we continue to get the old behaviour where feeds are taken from
// `nuget.config` files instead of the command-line arguments.
string? extraArgs = null;
if (this.dependabotProxy is not null)
{
// If the Dependabot proxy is configured, then our main goal is to make `dotnet` aware
// of the private registry feeds. However, since providing them as command-line arguments
// to `dotnet` ignores other feeds that may be configured, we also need to add the feeds
// we have discovered from analysing `nuget.config` files.
var sources = configuredSources ?? new();
this.dependabotProxy.RegistryURLs.ForEach(url => sources.Add(url));
// Add package sources. If any are present, they override all sources specified in
// the configuration file(s).
var feedArgs = new StringBuilder();
foreach (string source in sources)
{
feedArgs.Append($" -s {source}");
}
extraArgs = feedArgs.ToString();
}
var successCount = 0;
var nugetSourceFailures = 0;
ConcurrentBag<DependencyContainer> collectedDependencies = [];
@@ -276,7 +302,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
foreach (var project in projectGroup)
{
logger.LogInfo($"Restoring project {project}...");
var res = dotnet.Restore(new(project, PackageDirectory.DirInfo.FullName, ForceDotnetRefAssemblyFetching: true, TargetWindows: isWindows));
var res = dotnet.Restore(new(project, PackageDirectory.DirInfo.FullName, ForceDotnetRefAssemblyFetching: true, extraArgs, TargetWindows: isWindows));
assets.AddDependenciesRange(res.AssetsFilePaths);
lock (sync)
{
@@ -297,21 +323,21 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
compilationInfoContainer.CompilationInfos.Add(("Failed project restore with package source error", nugetSourceFailures.ToString()));
}
private AssemblyLookupLocation? DownloadMissingPackagesFromSpecificFeeds(HashSet<string>? feedsFromNugetConfigs)
private AssemblyLookupLocation? DownloadMissingPackagesFromSpecificFeeds(IEnumerable<string> usedPackageNames, HashSet<string>? feedsFromNugetConfigs)
{
var reachableFallbackFeeds = GetReachableFallbackNugetFeeds(feedsFromNugetConfigs);
if (reachableFallbackFeeds.Count > 0)
{
return DownloadMissingPackages(fallbackNugetFeeds: reachableFallbackFeeds);
return DownloadMissingPackages(usedPackageNames, fallbackNugetFeeds: reachableFallbackFeeds);
}
logger.LogWarning("Skipping download of missing packages from specific feeds as no fallback Nuget feeds are reachable.");
return null;
}
private AssemblyLookupLocation? DownloadMissingPackages(IEnumerable<string>? fallbackNugetFeeds = null)
private AssemblyLookupLocation? DownloadMissingPackages(IEnumerable<string> usedPackageNames, IEnumerable<string>? fallbackNugetFeeds = null)
{
var alreadyDownloadedPackages = GetRestoredPackageDirectoryNames(PackageDirectory.DirInfo);
var alreadyDownloadedPackages = usedPackageNames.Select(p => p.ToLowerInvariant());
var alreadyDownloadedLegacyPackages = GetRestoredLegacyPackageNames();
var notYetDownloadedPackages = new HashSet<PackageReference>(fileContent.AllPackages);
@@ -418,17 +444,23 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
return nugetConfig;
}
private void LogAllUnusedPackages(DependencyContainer dependencies)
private IEnumerable<string> GetAllUsedPackageDirNames(DependencyContainer dependencies)
{
var allPackageDirectories = GetAllPackageDirectories();
logger.LogInfo($"Restored {allPackageDirectories.Count} packages");
logger.LogInfo($"Found {dependencies.Packages.Count} packages in project.assets.json files");
allPackageDirectories
.Where(package => !dependencies.Packages.Contains(package))
var usage = allPackageDirectories.Select(package => (package, isUsed: dependencies.Packages.Contains(package)));
usage
.Where(package => !package.isUsed)
.Order()
.ForEach(package => logger.LogDebug($"Unused package: {package}"));
.ForEach(package => logger.LogDebug($"Unused package: {package.package}"));
return usage
.Where(package => package.isUsed)
.Select(package => package.package);
}
private ICollection<string> GetAllPackageDirectories()
@@ -674,10 +706,42 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
return (timeoutMilliSeconds, tryCount);
}
private bool CheckFeeds(out HashSet<string> explicitFeeds)
/// <summary>
/// Checks that we can connect to all Nuget feeds that are explicitly configured in configuration files
/// as well as any private package registry feeds that are configured.
/// </summary>
/// <param name="explicitFeeds">Outputs the set of explicit feeds.</param>
/// <param name="allFeeds">Outputs the set of all feeds (explicit and inherited).</param>
/// <returns>True if all feeds are reachable or false otherwise.</returns>
private bool CheckFeeds(out HashSet<string> explicitFeeds, out HashSet<string> allFeeds)
{
logger.LogInfo("Checking Nuget feeds...");
(explicitFeeds, var allFeeds) = GetAllFeeds();
(explicitFeeds, allFeeds) = GetAllFeeds();
HashSet<string> feedsToCheck = explicitFeeds;
// If private package registries are configured for C#, then check those
// in addition to the ones that are configured in `nuget.config` files.
this.dependabotProxy?.RegistryURLs.ForEach(url => feedsToCheck.Add(url));
var allFeedsReachable = this.CheckSpecifiedFeeds(feedsToCheck);
var inheritedFeeds = allFeeds.Except(explicitFeeds).ToHashSet();
if (inheritedFeeds.Count > 0)
{
logger.LogInfo($"Inherited Nuget feeds (not checked for reachability): {string.Join(", ", inheritedFeeds.OrderBy(f => f))}");
compilationInfoContainer.CompilationInfos.Add(("Inherited Nuget feed count", inheritedFeeds.Count.ToString()));
}
return allFeedsReachable;
}
/// <summary>
/// Checks that we can connect to the specified Nuget feeds.
/// </summary>
/// <param name="feeds">The set of package feeds to check.</param>
/// <returns>True if all feeds are reachable or false otherwise.</returns>
private bool CheckSpecifiedFeeds(HashSet<string> feeds)
{
logger.LogInfo("Checking that Nuget feeds are reachable...");
var excludedFeeds = EnvironmentVariables.GetURLs(EnvironmentVariableNames.ExcludedNugetFeedsFromResponsivenessCheck)
.ToHashSet();
@@ -689,7 +753,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
var (initialTimeout, tryCount) = GetFeedRequestSettings(isFallback: false);
var allFeedsReachable = explicitFeeds.All(feed => excludedFeeds.Contains(feed) || IsFeedReachable(feed, initialTimeout, tryCount));
var allFeedsReachable = feeds.All(feed => excludedFeeds.Contains(feed) || IsFeedReachable(feed, initialTimeout, tryCount));
if (!allFeedsReachable)
{
logger.LogWarning("Found unreachable Nuget feed in C# analysis with build-mode 'none'. This may cause missing dependencies in the analysis.");
@@ -704,14 +768,6 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
}
compilationInfoContainer.CompilationInfos.Add(("All Nuget feeds reachable", allFeedsReachable ? "1" : "0"));
var inheritedFeeds = allFeeds.Except(explicitFeeds).ToHashSet();
if (inheritedFeeds.Count > 0)
{
logger.LogInfo($"Inherited Nuget feeds (not checked for reachability): {string.Join(", ", inheritedFeeds.OrderBy(f => f))}");
compilationInfoContainer.CompilationInfos.Add(("Inherited Nuget feed count", inheritedFeeds.Count.ToString()));
}
return allFeedsReachable;
}
@@ -760,23 +816,33 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
}
// todo: this could be improved.
// We don't have to get the feeds from each of the folders from below, it would be enough to check the folders that recursively contain the others.
var allFeeds = nugetConfigs
.Select(config =>
{
try
HashSet<string>? allFeeds = null;
if (nugetConfigs.Count > 0)
{
// We don't have to get the feeds from each of the folders from below, it would be enough to check the folders that recursively contain the others.
allFeeds = nugetConfigs
.Select(config =>
{
return new FileInfo(config).Directory?.FullName;
}
catch (Exception exc)
{
logger.LogWarning($"Failed to get directory of '{config}': {exc}");
}
return null;
})
.Where(folder => folder != null)
.SelectMany(folder => GetFeeds(() => dotnet.GetNugetFeedsFromFolder(folder!)))
.ToHashSet();
try
{
return new FileInfo(config).Directory?.FullName;
}
catch (Exception exc)
{
logger.LogWarning($"Failed to get directory of '{config}': {exc}");
}
return null;
})
.Where(folder => folder != null)
.SelectMany(folder => GetFeeds(() => dotnet.GetNugetFeedsFromFolder(folder!)))
.ToHashSet();
}
else
{
// If we haven't found any `nuget.config` files, then obtain a list of feeds from the root source directory.
allFeeds = GetFeeds(() => dotnet.GetNugetFeedsFromFolder(this.fileProvider.SourceDir.FullName)).ToHashSet();
}
logger.LogInfo($"Found {allFeeds.Count} Nuget feeds (with inherited ones) in nuget.config files: {string.Join(", ", allFeeds.OrderBy(f => f))}");

View File

@@ -123,7 +123,7 @@ namespace Semmle.Extraction.Tests
var dotnet = MakeDotnet(dotnetCliInvoker);
// Execute
var res = dotnet.Restore(new("myproject.csproj", "mypackages", false, "myconfig.config"));
var res = dotnet.Restore(new("myproject.csproj", "mypackages", false, null, "myconfig.config"));
// Verify
var lastArgs = dotnetCliInvoker.GetLastArgs();
@@ -141,7 +141,7 @@ namespace Semmle.Extraction.Tests
var dotnet = MakeDotnet(dotnetCliInvoker);
// Execute
var res = dotnet.Restore(new("myproject.csproj", "mypackages", false, "myconfig.config", true));
var res = dotnet.Restore(new("myproject.csproj", "mypackages", false, null, "myconfig.config", true));
// Verify
var lastArgs = dotnetCliInvoker.GetLastArgs();

View File

@@ -4,7 +4,7 @@ source https://api.nuget.org/v3/index.json
# behave like nuget in choosing transitive dependency versions
strategy: max
nuget Basic.CompilerLog.Util
nuget Basic.CompilerLog.Util 0.9.8
nuget Mono.Posix.NETStandard
nuget Newtonsoft.Json
nuget xunit

csharp/paket.lock generated
View File

@@ -3,12 +3,12 @@ STRATEGY: MAX
RESTRICTION: == net9.0
NUGET
remote: https://api.nuget.org/v3/index.json
Basic.CompilerLog.Util (0.9.4)
Basic.CompilerLog.Util (0.9.8)
MessagePack (>= 2.5.187)
Microsoft.CodeAnalysis (>= 4.11)
Microsoft.CodeAnalysis.CSharp (>= 4.11)
Microsoft.CodeAnalysis.VisualBasic (>= 4.11)
Microsoft.Extensions.ObjectPool (>= 9.0)
Microsoft.CodeAnalysis (>= 4.12)
Microsoft.CodeAnalysis.CSharp (>= 4.12)
Microsoft.CodeAnalysis.VisualBasic (>= 4.12)
Microsoft.Extensions.ObjectPool (>= 9.0.2)
MSBuild.StructuredLogger (>= 2.2.243)
System.Buffers (>= 4.6)
Humanizer.Core (2.14.1)
@@ -96,7 +96,7 @@ NUGET
System.Reflection.Metadata (>= 8.0)
System.Threading.Channels (>= 7.0)
Microsoft.CodeCoverage (17.12)
Microsoft.Extensions.ObjectPool (9.0)
Microsoft.Extensions.ObjectPool (9.0.3)
Microsoft.NET.StringTools (17.12.6)
Microsoft.NET.Test.Sdk (17.12)
Microsoft.CodeCoverage (>= 17.12)

csharp/paket.main.bzl generated

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,7 @@
| All Nuget feeds reachable | 1.0 |
| Failed project restore with package source error | 0.0 |
| Failed solution restore with package source error | 0.0 |
| Inherited Nuget feed count | 1.0 |
| NuGet feed responsiveness checked | 1.0 |
| Project files on filesystem | 1.0 |
| Reachable fallback Nuget feed count | 1.0 |

View File

@@ -1,6 +1,7 @@
| All Nuget feeds reachable | 1.0 |
| Failed project restore with package source error | 0.0 |
| Failed solution restore with package source error | 0.0 |
| Inherited Nuget feed count | 1.0 |
| NuGet feed responsiveness checked | 1.0 |
| Project files on filesystem | 1.0 |
| Reachable fallback Nuget feed count | 1.0 |

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Improved dependency resolution in `build-mode: none` extraction to handle failing `dotnet restore` processes that managed to download a subset of the dependencies before the failure.
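
A hypothetical sketch of the idea behind this note, using made-up helper names: only packages actually referenced by the generated project.assets.json files are treated as already downloaded, so anything left half-restored by a failed `dotnet restore` is downloaded again rather than silently skipped.
using System.Collections.Generic;
using System.Linq;
public static class MissingPackagesSketch
{
// Packages referenced by the restore output count as downloaded; everything
// else that the projects reference still needs to be fetched. NuGet package
// directory names are lower-case, hence the case-insensitive comparison.
public static IEnumerable<string> PackagesStillToDownload(
IEnumerable<string> usedPackageNames,
IEnumerable<string> allReferencedPackages)
{
var alreadyDownloaded = usedPackageNames
.Select(p => p.ToLowerInvariant())
.ToHashSet();
return allReferencedPackages
.Where(p => !alreadyDownloaded.Contains(p.ToLowerInvariant()));
}
}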

View File

@@ -233,6 +233,10 @@ module ModelGeneratorInput implements ModelGeneratorInputSig<Location, CsharpDat
result = ParamReturnNodeAsOutput<parameterContentAccess/1>::paramReturnNodeAsOutput(c, pos)
}
ParameterPosition getReturnKindParamPosition(ReturnKind kind) {
kind.(OutRefReturnKind).getPosition() = result.getPosition()
}
Callable returnNodeEnclosingCallable(DataFlow::Node ret) {
result = DataFlowImplCommon::getNodeEnclosingCallable(ret).asCallable(_)
}

View File

@@ -1034,3 +1034,40 @@ public class AvoidDuplicateLifted
}
}
}
public class ParameterModifiers
{
// contentbased-summary=Models;ParameterModifiers;false;Copy;(System.Object,System.Object);;Argument[0];Argument[1];value;dfc-generated
// summary=Models;ParameterModifiers;false;Copy;(System.Object,System.Object);;Argument[0];Argument[1];taint;df-generated
public void Copy(object key, out object value)
{
value = key;
}
// contentbased-summary=Models;ParameterModifiers;false;CopyToRef;(System.Object,System.Object);;Argument[0];Argument[1];value;dfc-generated
// summary=Models;ParameterModifiers;false;CopyToRef;(System.Object,System.Object);;Argument[0];Argument[1];taint;df-generated
public void CopyToRef(object key, ref object value)
{
value = key;
}
// No summaries as we disregard flow from a parameter to itself.
// neutral=Models;ParameterModifiers;RefParamFlowToSelf;(System.Object,System.Boolean);summary;df-generated
public void RefParamFlowToSelf(ref object value, bool b)
{
value = b ? value : null;
}
// neutral=Models;ParameterModifiers;RefParamUse;(System.Object);summary;df-generated
public void RefParamUse(ref object value)
{
var b = value is null;
}
// contentbased-summary=Models;ParameterModifiers;false;InReturn;(System.Object);;Argument[0];ReturnValue;value;dfc-generated
// summary=Models;ParameterModifiers;false;InReturn;(System.Object);;Argument[0];ReturnValue;taint;df-generated
public object InReturn(in object v)
{
return v;
}
}

View File

@@ -30,6 +30,8 @@ The following properties are supported by all query files:
+-----------------------+---------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| ``@id`` | ``<text>`` | A sequence of words composed of lowercase letters or digits, delimited by ``/`` or ``-``, identifying and classifying the query. Each query must have a **unique** ID. To ensure this, it may be helpful to use a fixed structure for each ID. For example, the standard CodeQL queries have the following format: ``<language>/<brief-description>``. |
+-----------------------+---------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| ``@previous-id`` | ``<text>`` | Indicates that query results were previously reported on a different query. The previous id should be a sequence of words composed of lowercase letters or digits, delimited by ``/`` or ``-``, identifying and classifying the previous query. |
+-----------------------+---------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
| ``@kind`` | | ``problem`` | Identifies the query is an alert (``@kind problem``) or a path (``@kind path-problem``). For more information on these query types, see ":doc:`About CodeQL queries <about-codeql-queries>`." |
| | | ``path-problem`` | |
+-----------------------+---------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+

View File

@@ -93,6 +93,10 @@ Note, `@id` properties should be consistent for queries that highlight the same
* `@id java/tainted-format-string`
* `@id cpp/tainted-format-string`
#### Query previous ID `@previous-id`
Queries with alerts that used to be reported on a different query should also have an `@previous-id` property to refer back to the query where the alerts were originally reported. For example, if alerts from `java/query-one` are now reported on `java/query-two`, then the metadata for `java/query-two` should contain: `@previous-id java/query-one`.
### Query type `@kind`
@@ -113,7 +117,7 @@ Alert queries (`@kind problem` or `path-problem`) support two further properties
* `medium`
* `high`
* `very-high`
* `@problem.severity`: defines the likelihood that an alert, either security-related or not, causes an actual problem such as incorrect program behavior:
* `error`: an issue that is likely to cause incorrect program behavior, for example a crash or vulnerability.
* `warning`: an issue that indicates a potential problem in the code, or makes the code fragile if another (unrelated) part of code is changed.
* `recommendation`: an issue where the code behaves correctly, but it could be improved.

View File

@@ -52,9 +52,9 @@ ql/lib/go.dbscheme.stats: ql/lib/go.dbscheme build/stats/src.stamp extractor
codeql dataset measure -o $@ build/stats/database/db-go
test: all build/testdb/check-upgrade-path
codeql test run -j0 ql/test --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache)
codeql test run -j0 ql/test --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo)
# use GOOS=linux because GOOS=darwin GOARCH=386 is no longer supported
env GOOS=linux GOARCH=386 codeql$(EXE) test run -j0 ql/test/query-tests/Security/CWE-681 --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache)
env GOOS=linux GOARCH=386 codeql$(EXE) test run -j0 ql/test/query-tests/Security/CWE-681 --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo)
cd extractor; $(BAZEL) test ...
bash extractor-smoke-test/test.sh || (echo "Extractor smoke test FAILED"; exit 1)

View File

@@ -9,6 +9,10 @@ inputs:
description: Whether to run formatting, code and qhelp generation checks
required: false
default: false
dynamic-join-order-mode:
description: Value of the --dynamic-join-order-mode flag to pass to the codeql test command
required: false
default: "none"
runs:
using: composite
steps:
@@ -74,4 +78,4 @@ runs:
shell: bash
run: |
cd go
make test cache="${{ steps.query-cache.outputs.cache-dir }}"
make test cache="${{ steps.query-cache.outputs.cache-dir }}" rtjo=${{ inputs.dynamic-join-order-mode }}

View File

@@ -33,8 +33,8 @@ github.com/beego/beego,142,68,42,,,,68,,10,,,,,,60,4,,,,,26,,,42,,42,
github.com/caarlos0/env,,5,2,,,,,,,,,,,,,,,,,,,5,,,,1,1
github.com/clevergo/clevergo,1,,,,,,,,,,,,,,,,,1,,,,,,,,,
github.com/codeskyblue/go-sh,4,,,4,,,,,,,,,,,,,,,,,,,,,,,
github.com/couchbase/gocb,8,,18,,,,,8,,,,,,,,,,,,,,,,,,18,
github.com/couchbaselabs/gocb,8,,18,,,,,8,,,,,,,,,,,,,,,,,,18,
github.com/couchbase/gocb,8,22,48,,,,,8,,,,,,,,,,,,,22,,,,,48,
github.com/couchbaselabs/gocb,8,22,48,,,,,8,,,,,,,,,,,,,22,,,,,48,
github.com/crankycoder/xmlpath,2,,,,,,,,,,,,,,,,,,2,,,,,,,,
github.com/cristalhq/jwt,1,,,,1,,,,,,,,,,,,,,,,,,,,,,
github.com/davecgh/go-spew/spew,9,,,,,,9,,,,,,,,,,,,,,,,,,,,
@@ -107,7 +107,7 @@ google.golang.org/protobuf/internal/impl,,,2,,,,,,,,,,,,,,,,,,,,,,,2,
google.golang.org/protobuf/proto,,,8,,,,,,,,,,,,,,,,,,,,,,,8,
google.golang.org/protobuf/reflect/protoreflect,,,1,,,,,,,,,,,,,,,,,,,,,,,1,
gopkg.in/Masterminds/squirrel,32,,,,,,,,,,,,,,32,,,,,,,,,,,,
gopkg.in/couchbase/gocb,8,,18,,,,,8,,,,,,,,,,,,,,,,,,18,
gopkg.in/couchbase/gocb,8,22,48,,,,,8,,,,,,,,,,,,,22,,,,,48,
gopkg.in/glog,90,,,,,,90,,,,,,,,,,,,,,,,,,,,
gopkg.in/go-jose/go-jose,3,,4,,2,1,,,,,,,,,,,,,,,,,,,,4,
gopkg.in/go-xmlpath/xmlpath,2,,,,,,,,,,,,,,,,,,2,,,,,,,,

View File

@@ -10,8 +10,8 @@ Go framework & library support
`Afero <https://github.com/spf13/afero>`_,``github.com/spf13/afero*``,,,34
`Bun <https://bun.uptrace.dev/>`_,``github.com/uptrace/bun*``,,,63
`CleverGo <https://github.com/clevergo/clevergo>`_,"``clevergo.tech/clevergo*``, ``github.com/clevergo/clevergo*``",,,2
`Couchbase official client(gocb) <https://github.com/couchbase/gocb>`_,"``github.com/couchbase/gocb*``, ``gopkg.in/couchbase/gocb*``",,36,16
`Couchbase unofficial client <http://www.github.com/couchbase/go-couchbase>`_,``github.com/couchbaselabs/gocb*``,,18,8
`Couchbase official client(gocb) <https://github.com/couchbase/gocb>`_,"``github.com/couchbase/gocb*``, ``gopkg.in/couchbase/gocb*``",44,96,16
`Couchbase unofficial client <http://www.github.com/couchbase/go-couchbase>`_,``github.com/couchbaselabs/gocb*``,22,48,8
`Echo <https://echo.labstack.com/>`_,``github.com/labstack/echo*``,12,2,3
`Fiber <https://github.com/gofiber/fiber>`_,``github.com/gofiber/fiber*``,,,5
`Fosite <https://github.com/ory/fosite>`_,``github.com/ory/fosite*``,,,2
@@ -74,5 +74,5 @@ Go framework & library support
`yaml <https://gopkg.in/yaml.v3>`_,``gopkg.in/yaml*``,,9,
`zap <https://go.uber.org/zap>`_,``go.uber.org/zap*``,,11,33
Others,``github.com/kanikanema/gorqlite``,8,2,24
Totals,,494,958,1556
Totals,,560,1048,1556

View File

@@ -0,0 +1,5 @@
---
category: minorAnalysis
---
* `database` source models have been added for v1 and v2 of the `github.com/couchbase/gocb` package.

View File

@@ -9,6 +9,32 @@ extensions:
- ["gocb2", "github.com/couchbase/gocb/v2"]
- ["gocb2", "gopkg.in/couchbase/gocb.v2"]
- ["gocb2", "github.com/couchbaselabs/gocb/v2"]
- addsTo:
pack: codeql/go-all
extensible: sourceModel
data:
- ["group:gocb1", "Cluster", True, "ExecuteAnalyticsQuery", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb1", "Cluster", True, "ExecuteN1qlQuery", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb1", "Cluster", True, "ExecuteSearchQuery", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Cluster", True, "AnalyticsQuery", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Cluster", True, "Query", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "Get", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "GetAndLock", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "GetAndTouch", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "GetAnyReplica", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "LookupIn", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "LookupInAllReplicas", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "LookupInAnyReplica", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Collection", True, "Scan", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Scope", True, "AnalyticsQuery", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "Scope", True, "Query", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "TransactionAttemptContext", True, "Get", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "TransactionAttemptContext", True, "GetReplicaFromPreferredServerGroup", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "TransactionAttemptContext", True, "Insert", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "TransactionAttemptContext", True, "Query", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "TransactionAttemptContext", True, "Replace", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "ViewIndexManager", True, "GetAllDesignDocuments", "", "", "ReturnValue[0]", "database", "manual"]
- ["group:gocb2", "ViewIndexManager", True, "GetDesignDocument", "", "", "ReturnValue[0]", "database", "manual"]
- addsTo:
pack: codeql/go-all
extensible: sinkModel
@@ -27,6 +53,9 @@ extensions:
data:
- ["group:gocb1", "", False, "NewAnalyticsQuery", "", "", "Argument[0]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "", False, "NewN1qlQuery", "", "", "Argument[0]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "AnalyticsResults", True, "One", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb1", "AnalyticsResults", True, "Next", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb1", "AnalyticsResults", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "AnalyticsQuery", True, "ContextId", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "AnalyticsQuery", True, "Deferred", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "AnalyticsQuery", True, "Pretty", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
@@ -43,3 +72,30 @@ extensions:
- ["group:gocb1", "N1qlQuery", True, "ReadOnly", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "N1qlQuery", True, "ScanCap", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "N1qlQuery", True, "Timeout", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "QueryResults", True, "One", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb1", "QueryResults", True, "Next", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb1", "QueryResults", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb1", "SearchResults", True, "Hits", "", "", "Argument[receiver]", "ReturnValue.ArrayElement", "taint", "manual"]
- ["group:gocb2", "AnalyticsResult", True, "One", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "AnalyticsResult", True, "Raw", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "AnalyticsResult", True, "Row", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "AnalyticsResultRaw", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "GetResult", True, "Content", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "LookupInAllReplicasResult", True, "Next", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "LookupInResult", True, "ContentAt", "", "", "Argument[receiver]", "Argument[1]", "taint", "manual"]
- ["group:gocb2", "MutateInResult", True, "ContentAt", "", "", "Argument[receiver]", "Argument[1]", "taint", "manual"]
- ["group:gocb2", "QueryResult", True, "One", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "QueryResult", True, "Raw", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "QueryResult", True, "Row", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "QueryResultRaw", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "ScanResult", True, "Next", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "ScanResultItem", True, "Content", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "SearchResult", True, "Raw", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "SearchResult", True, "Row", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "SearchResultRaw", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "TransactionGetResult", True, "Content", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "TransactionQueryResult", True, "One", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "TransactionQueryResult", True, "Row", "", "", "Argument[receiver]", "Argument[0]", "taint", "manual"]
- ["group:gocb2", "ViewResult", True, "Raw", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "ViewResult", True, "Row", "", "", "Argument[receiver]", "ReturnValue", "taint", "manual"]
- ["group:gocb2", "ViewResultRaw", True, "NextBytes", "", "", "Argument[receiver]", "ReturnValue[0]", "taint", "manual"]

View File

@@ -357,6 +357,23 @@ module RegexpReplaceFunction {
class LoggerCall extends DataFlow::Node instanceof LoggerCall::Range {
/** Gets a node that is a part of the logged message. */
DataFlow::Node getAMessageComponent() { result = super.getAMessageComponent() }
/**
* Gets a node whose value is a part of the logged message.
*
* Components corresponding to the format specifier "%T" are excluded as
* their type is logged rather than their value.
*/
DataFlow::Node getAValueFormattedMessageComponent() {
result = this.getAMessageComponent() and
not exists(string formatSpecifier |
result = this.(StringOps::Formatting::StringFormatCall).getOperand(_, formatSpecifier) and
// We already know that `formatSpecifier` starts with `%`, so we check
// that it ends with `T` to confirm that it is `%T` or possibly some
// variation on it.
formatSpecifier.matches("%T")
)
}
}
/** Provides a class for modeling new logging APIs. */

View File

@@ -40,7 +40,7 @@ module CleartextLogging {
* An argument to a logging mechanism.
*/
class LoggerSink extends Sink {
LoggerSink() { this = any(LoggerCall log).getAMessageComponent() }
LoggerSink() { this = any(LoggerCall log).getAValueFormattedMessageComponent() }
}
/**

View File

@@ -35,7 +35,7 @@ module LogInjection {
/** An argument to a logging mechanism. */
class LoggerSink extends Sink {
LoggerSink() { this = any(LoggerCall log).getAMessageComponent() }
LoggerSink() { this = any(LoggerCall log).getAValueFormattedMessageComponent() }
}
/**

View File

@@ -138,7 +138,9 @@ predicate privateUrlFlowsToAuthCodeUrlCall(DataFlow::CallNode call) {
module FlowToPrintConfig implements DataFlow::ConfigSig {
additional predicate isSinkCall(DataFlow::Node sink, DataFlow::CallNode call) {
exists(LoggerCall logCall | call = logCall | sink = logCall.getAMessageComponent())
exists(LoggerCall logCall | call = logCall |
sink = logCall.getAValueFormattedMessageComponent()
)
}
predicate isSource(DataFlow::Node source) { source = any(AuthCodeUrl m).getACall().getResult() }

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* False positives in "Log entries created from user input" (`go/log-injection`) and "Clear-text logging of sensitive information" (`go/clear-text-logging`) which involved the verb `%T` in a format specifier have been fixed. As a result, some users may also see more alerts from the "Use of constant `state` value in OAuth 2.0 URL" (`go/constant-oauth2-state`) query.

View File

@@ -4,14 +4,20 @@ import ModelValidation
import utils.test.InlineExpectationsTest
module LoggerTest implements TestSig {
string getARelevantTag() { result = "logger" }
string getARelevantTag() { result = ["type-logger", "logger"] }
predicate hasActualResult(Location location, string element, string tag, string value) {
exists(LoggerCall log |
log.getLocation() = location and
element = log.toString() and
value = log.getAMessageComponent().toString() and
tag = "logger"
(
value = log.getAValueFormattedMessageComponent().toString() and
tag = "logger"
or
value = log.getAMessageComponent().toString() and
not value = log.getAValueFormattedMessageComponent().toString() and
tag = "type-logger"
)
)
}
}

View File

@@ -30,6 +30,13 @@ func glogTest() {
glog.Warningf(fmt, text) // $ logger=fmt logger=text
glog.Warningln(text) // $ logger=text
// components corresponding to the format specifier "%T" are not considered vulnerable
glog.Errorf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
glog.Exitf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
glog.Fatalf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
glog.Infof("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
glog.Warningf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
klog.Error(text) // $ logger=text
klog.ErrorDepth(0, text) // $ logger=text
klog.Errorf(fmt, text) // $ logger=fmt logger=text
@@ -50,4 +57,11 @@ func glogTest() {
klog.WarningDepth(0, text) // $ logger=text
klog.Warningf(fmt, text) // $ logger=fmt logger=text
klog.Warningln(text) // $ logger=text
// components corresponding to the format specifier "%T" are not considered vulnerable
klog.Errorf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
klog.Exitf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
klog.Fatalf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
klog.Infof("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
klog.Warningf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
}

View File

@@ -32,4 +32,8 @@ func logrusCalls() {
logrus.Panicln(text) // $ logger=text
logrus.Infof(fmt, text) // $ logger=fmt logger=text
logrus.FatalFn(fn) // $ logger=fn
// components corresponding to the format specifier "%T" are not considered vulnerable
logrus.Infof("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
logrus.Fatalf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
}

View File

@@ -3,6 +3,8 @@ package main
const fmt = "formatted %s string"
const text = "test"
func main() {
var v []byte
func main() {
stdlib()
}

View File

@@ -17,6 +17,11 @@ func stdlib() {
logger.Printf(fmt, text) // $ logger=fmt logger=text
logger.Println(text) // $ logger=text
// components corresponding to the format specifier "%T" are not considered vulnerable
logger.Fatalf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
logger.Panicf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
logger.Printf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
log.SetPrefix("prefix: ")
log.Fatal(text) // $ logger=text
log.Fatalf(fmt, text) // $ logger=fmt logger=text
@@ -27,4 +32,9 @@ func stdlib() {
log.Print(text) // $ logger=text
log.Printf(fmt, text) // $ logger=fmt logger=text
log.Println(text) // $ logger=text
// components corresponding to the format specifier "%T" are not considered vulnerable
log.Fatalf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
log.Panicf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
log.Printf("%s: found type %T", text, v) // $ logger="%s: found type %T" logger=text type-logger=v
}

View File

@@ -1,9 +1,57 @@
module test
go 1.22.5
go 1.24
require (
gorm.io/gorm v1.23.0
github.com/astaxie/beego v1.12.3
github.com/beego/beego/v2 v2.3.5
github.com/couchbase/gocb v1.6.7
github.com/couchbase/gocb/v2 v2.9.4
github.com/jmoiron/sqlx v1.4.0
go.mongodb.org/mongo-driver/mongo v1.17.2
github.com/rqlite/gorqlite v0.0.0-20250128004930-114c7828b55a
go.mongodb.org/mongo-driver v1.17.3
gorm.io/gorm v1.25.12
)
require (
github.com/couchbase/gocbcore/v10 v10.5.4 // indirect
github.com/couchbase/gocbcoreps v0.1.3 // indirect
github.com/couchbase/goprotostellar v1.0.2 // indirect
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0-20240607131231-fb385523de28 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/klauspost/compress v1.16.7 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.uber.org/zap v1.27.0 // indirect
golang.org/x/crypto v0.26.0 // indirect
golang.org/x/net v0.24.0 // indirect
golang.org/x/sync v0.8.0 // indirect
golang.org/x/sys v0.23.0 // indirect
golang.org/x/text v0.17.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda // indirect
google.golang.org/grpc v1.63.2 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/couchbase/gocbcore.v7 v7.1.18 // indirect
gopkg.in/couchbaselabs/gocbconnstr.v1 v1.0.4 // indirect
gopkg.in/couchbaselabs/gojcbmock.v1 v1.0.4 // indirect
gopkg.in/couchbaselabs/jsonx.v1 v1.0.1 // indirect
)

View File

@@ -1,5 +1,8 @@
package test
//go:generate depstubber -vendor github.com/astaxie/beego/orm Ormer NewOrm
//go:generate depstubber -vendor github.com/beego/beego/v2/client/orm DB,DQL,Ormer NewOrm
import (
oldOrm "github.com/astaxie/beego/orm"
"github.com/beego/beego/v2/client/orm"

View File

@@ -0,0 +1,57 @@
package test
//go:generate depstubber -vendor github.com/couchbase/gocb Cluster,AnalyticsResults,QueryResults,SearchResults
import "github.com/couchbase/gocb"
func test_couchbase_gocb_v1_Cluster(cluster *gocb.Cluster, aq *gocb.AnalyticsQuery, n1ql *gocb.N1qlQuery, sq *gocb.SearchQuery) {
// Analytics
r1, err := cluster.ExecuteAnalyticsQuery(aq, nil) // $ source
if err != nil {
return
}
var user1, user2 User
r1.One(&user1)
sink(user1) // $ hasTaintFlow="user1"
for r1.Next(user2) {
sink(user2) // $ hasTaintFlow="user2"
}
var b1 []byte
b1 = r1.NextBytes()
sink(b1) // $ hasTaintFlow="b1"
// N1QL
r2, err := cluster.ExecuteN1qlQuery(n1ql, nil) // $ source
if err != nil {
return
}
var user3, user4 User
r2.One(&user3)
sink(user3) // $ hasTaintFlow="user3"
for r2.Next(user4) {
sink(user4) // $ hasTaintFlow="user4"
}
var b2 []byte
b2 = r2.NextBytes()
sink(b2) // $ hasTaintFlow="b2"
// Search
r3, err := cluster.ExecuteSearchQuery(sq) // $ source
if err != nil {
return
}
hit := r3.Hits()[0]
sink(hit) // $ hasTaintFlow="hit"
}
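
Note that the User type and the sink helper used above (and in the gocb/v2 tests that follow) are not declared in this file; they are expected to come from elsewhere in the test package. A minimal sketch of what such package-level declarations could look like, assuming only what the call sites imply and not anything shown in this diff, is:

package test
// User is a placeholder for the value that query results are decoded into;
// the real test package defines its own version elsewhere (assumed, not shown here).
type User struct {
Name string
}
// sink is a no-op consumer that these taint-flow tests use as the sink of interest
// for the inline "$ hasTaintFlow" expectations.
func sink(v interface{}) {}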

View File

@@ -0,0 +1,247 @@
package test
//go:generate depstubber -vendor github.com/couchbase/gocb/v2 AnalyticsResult,AnalyticsResultRaw,Cluster,Collection,ExistsResult,GetResult,LookupInReplicaResult,LookupInResult,MutateInResult,MutationResult,QueryResult,QueryResultRaw,Result,ScanResult,ScanResultItem,Scope,SearchResult,SearchResultRaw,TransactionAttemptContext,TransactionGetResult,TransactionQueryResult,ViewIndexManager,ViewResult,ViewResultRaw
import "github.com/couchbase/gocb/v2"
func test_couchbase_gocb_v2_Cluster(cluster *gocb.Cluster) {
r1, err := cluster.AnalyticsQuery("SELECT * FROM `travel-sample`", nil) // $ source
if err != nil {
return
}
for r1.Next() {
var name1, name2 string
r1.One(&name1)
sink(name1) // $ hasTaintFlow="name1"
r1.Row(&name2)
sink(name2) // $ hasTaintFlow="name2"
b := r1.Raw().NextBytes()
sink(b) // $ hasTaintFlow="b"
}
r2, err := cluster.Query("SELECT * FROM `travel-sample`", nil) // $ source
if err != nil {
return
}
for r2.Next() {
var name1, name2 string
r2.One(&name1)
sink(name1) // $ hasTaintFlow="name1"
r2.Row(&name2)
sink(name2) // $ hasTaintFlow="name2"
b := r2.Raw().NextBytes()
sink(b) // $ hasTaintFlow="b"
}
}
func test_couchbase_gocb_v2_Scope(scope *gocb.Scope) {
r1, err := scope.AnalyticsQuery("SELECT * FROM `travel-sample`", nil) // $ source
if err != nil {
return
}
for r1.Next() {
var name1, name2 string
r1.One(&name1)
sink(name1) // $ hasTaintFlow="name1"
r1.Row(&name2)
sink(name2) // $ hasTaintFlow="name2"
b := r1.Raw().NextBytes()
sink(b) // $ hasTaintFlow="b"
}
r2, err := scope.Query("SELECT * FROM `travel-sample`", nil) // $ source
if err != nil {
return
}
for r2.Next() {
var name1, name2 string
r2.One(&name1)
sink(name1) // $ hasTaintFlow="name1"
r2.Row(&name2)
sink(name2) // $ hasTaintFlow="name2"
b := r2.Raw().NextBytes()
sink(b) // $ hasTaintFlow="b"
}
}
func test_couchbase_gocb_v2_Collection(coll *gocb.Collection) {
type User struct {
Name string
}
var user User
r1, err := coll.Get("documentID", nil) // $ source
if err != nil {
return
}
r1.Content(&user)
sink(user) // $ hasTaintFlow="user"
r2, err := coll.GetAndLock("documentID", 30, nil) // $ source
if err != nil {
return
}
sink(r2) // $ hasTaintFlow="r2"
r3, err := coll.GetAndTouch("documentID", 30, nil) // $ source
if err != nil {
return
}
var user3 User
r3.Content(&user3)
sink(user3) // $ hasTaintFlow="user3"
r4, err := coll.GetAnyReplica("documentID", nil) // $ source
if err != nil {
return
}
sink(r4) // $ hasTaintFlow="r4"
r5, err := coll.LookupIn("documentID", []gocb.LookupInSpec{}, nil) // $ source
if err != nil {
return
}
var user5 User
r5.ContentAt(0, &user5)
sink(user5) // $ hasTaintFlow="user5"
r6, err := coll.LookupInAllReplicas("documentID", []gocb.LookupInSpec{}, nil) // $ source
if err != nil {
return
}
var user6 User
r6.Next().ContentAt(0, &user6)
sink(user6) // $ hasTaintFlow="user6"
r7, err := coll.LookupInAnyReplica("documentID", []gocb.LookupInSpec{}, nil) // $ source
if err != nil {
return
}
var user7 User
r7.ContentAt(0, &user7)
sink(user7) // $ hasTaintFlow="user7"
r8, err := coll.Scan(nil, nil) // $ source
if err != nil {
return
}
var user8 User
r8.Next().Content(&user8)
sink(user8) // $ hasTaintFlow="user8"
}
func test_couchbase_gocb_v2_TransactionAttemptContext(tam *gocb.TransactionAttemptContext, coll *gocb.Collection) {
r1, err := tam.Get(coll, "documentID") // $ source
if err != nil {
return
}
var user User
r1.Content(&user)
sink(user) // $ hasTaintFlow="user"
r2, err := tam.GetReplicaFromPreferredServerGroup(coll, "documentID") // $ source
if err != nil {
return
}
var user2 User
r2.Content(&user2)
sink(user2) // $ hasTaintFlow="user2"
var user3 User
r3, err := tam.Insert(coll, "documentID", &user3) // $ source
if err != nil {
return
}
var user4 User
r3.Content(&user4)
sink(user4) // $ hasTaintFlow="user4"
r4, err := tam.Query("SELECT * FROM `travel-sample`", nil) // $ source
if err != nil {
return
}
for r4.Next() {
var user5 User
r4.One(&user5)
sink(user5) // $ hasTaintFlow="user5"
var user6 User
r4.Row(&user6)
sink(user6) // $ hasTaintFlow="user6"
}
r5, err := tam.Replace(r3, user4) // $ source
if err != nil {
return
}
sink(r5) // $ hasTaintFlow="r5"
}
func test_couchbase_gocb_v2_ViewIndexManager(v *gocb.ViewIndexManager) {
doc, err := v.GetDesignDocument("name", 0, nil) // $ source
if err != nil {
return
}
sink(doc) // $ hasTaintFlow="doc"
docs, err := v.GetAllDesignDocuments(0, nil) // $ source
if err != nil {
return
}
sink(docs) // $ hasTaintFlow="docs"
}

View File

@@ -1,5 +1,7 @@
package test
//go:generate depstubber -vendor gorm.io/gorm Association,ConnPool,DB
import "gorm.io/gorm"
// test querying an Association

View File

@@ -1,5 +1,7 @@
package test
//go:generate depstubber -vendor github.com/jmoiron/sqlx Conn,DB,NamedStmt,Stmt,Tx Get,GetContext,NamedQuery,NamedQueryContext,Select,SelectContext
import (
"context"

View File

@@ -2,7 +2,7 @@
// This is a simple stub for github.com/beego/beego/v2/client/orm, strictly for use in testing.
// See the LICENSE file for information about the licensing of the original library.
// Source: github.com/beego/beego/v2/client/orm (exports: DQL,DB,Ormer; functions: NewOrm)
// Source: github.com/beego/beego/v2/client/orm (exports: DB,DQL,Ormer; functions: NewOrm)
// Package orm is a stub of github.com/beego/beego/v2/client/orm, generated by depstubber.
package orm
@@ -56,30 +56,10 @@ func (_ *Condition) OrNotCond(_ *Condition) *Condition {
}
type DB struct {
RWMutex *sync.RWMutex
DB *sql.DB
*sync.RWMutex
DB *sql.DB
}
func (_ DB) Lock() {}
func (_ DB) RLock() {}
func (_ DB) RLocker() sync.Locker {
return nil
}
func (_ DB) RUnlock() {}
func (_ DB) TryLock() bool {
return false
}
func (_ DB) TryRLock() bool {
return false
}
func (_ DB) Unlock() {}
func (_ *DB) Begin() (*sql.Tx, error) {
return nil, nil
}

View File

@@ -1,26 +0,0 @@
package sqlx
import (
"context"
"database/sql"
)
type Conn struct {
*sql.Conn
}
func (c *Conn) GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error {
return nil
}
func (c *Conn) SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error {
return nil
}
func (c *Conn) QueryRowxContext(ctx context.Context, query string, args ...interface{}) *Row {
return nil
}
func (c *Conn) QueryxContext(ctx context.Context, query string, args ...interface{}) (*Rows, error) {
return nil, nil
}

View File

@@ -1,52 +0,0 @@
package sqlx
import (
"context"
"database/sql"
)
type DB struct {
*sql.DB
// Mapper *reflectx.Mapper
}
func (db *DB) Get(dest interface{}, query string, args ...interface{}) error {
return nil
}
func (db *DB) GetContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error {
return nil
}
func (db *DB) QueryRowx(query string, args ...interface{}) *Row {
return nil
}
func (db *DB) QueryRowxContext(ctx context.Context, query string, args ...interface{}) *Row {
return nil
}
func (db *DB) Queryx(query string, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (db *DB) QueryxContext(ctx context.Context, query string, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (db *DB) Select(dest interface{}, query string, args ...interface{}) error {
return nil
}
func (db *DB) SelectContext(ctx context.Context, dest interface{}, query string, args ...interface{}) error {
return nil
}
func (db *DB) NamedQuery(query string, arg interface{}) (*Rows, error) {
return nil, nil
}
func (db *DB) NamedQueryContext(ctx context.Context, query string, arg interface{}) (*Rows, error) {
return nil, nil
}

View File

@@ -1,60 +0,0 @@
package sqlx
import (
"context"
"database/sql"
)
type NamedStmt struct {
Params []string
QueryString string
Stmt *sql.Stmt
}
func (s *NamedStmt) Get(dest interface{}, args ...interface{}) error {
return nil
}
func (s *NamedStmt) GetContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}
func (s *NamedStmt) QueryRow(args ...interface{}) *Row {
return nil
}
func (s *NamedStmt) QueryRowContext(ctx context.Context, args ...interface{}) *Row {
return nil
}
func (s *NamedStmt) Query(args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *NamedStmt) QueryContext(ctx context.Context, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *NamedStmt) QueryRowx(args ...interface{}) *Row {
return nil
}
func (s *NamedStmt) QueryRowxContext(ctx context.Context, args ...interface{}) *Row {
return nil
}
func (s *NamedStmt) Queryx(args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *NamedStmt) QueryxContext(ctx context.Context, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *NamedStmt) Select(dest interface{}, args ...interface{}) error {
return nil
}
func (s *NamedStmt) SelectContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}

View File

@@ -1,21 +0,0 @@
package sqlx
type Row struct {
// Mapper *reflectx.Mapper
}
func (r *Row) MapScan(dest map[string]interface{}) error {
return nil
}
func (r *Row) StructScan(dest interface{}) error {
return nil
}
func (r *Row) SliceScan(dest []interface{}) error {
return nil
}
func (r *Row) Scan(dest ...interface{}) error {
return nil
}

View File

@@ -1,22 +0,0 @@
package sqlx
import "database/sql"
type Rows struct {
*sql.Rows
// Mapper *reflectx.Mapper
// contains filtered or unexported fields
}
func (r *Rows) MapScan(dest map[string]interface{}) error {
return nil
}
func (r *Rows) StructScan(dest interface{}) error {
return nil
}
func (r *Rows) SliceScan(dest []interface{}) error {
return nil
}

View File

@@ -1,42 +0,0 @@
package sqlx
import (
"context"
"database/sql"
)
type Stmt struct {
*sql.Stmt
}
func (s *Stmt) Get(dest interface{}, args ...interface{}) error {
return nil
}
func (s *Stmt) GetContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}
func (s *Stmt) QueryRowx(args ...interface{}) *Row {
return nil
}
func (s *Stmt) QueryRowxContext(ctx context.Context, args ...interface{}) *Row {
return nil
}
func (s *Stmt) Queryx(args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *Stmt) QueryxContext(ctx context.Context, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (s *Stmt) Select(dest interface{}, args ...interface{}) error {
return nil
}
func (s *Stmt) SelectContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}

View File

@@ -1,67 +1,530 @@
// Code generated by depstubber. DO NOT EDIT.
// This is a simple stub for github.com/jmoiron/sqlx, strictly for use in testing.
// See the LICENSE file for information about the licensing of the original library.
// Source: github.com/jmoiron/sqlx (exports: Conn,DB,NamedStmt,Stmt,Tx; functions: Get,GetContext,NamedQuery,NamedQueryContext,Select,SelectContext)
// Package sqlx is a stub of github.com/jmoiron/sqlx, generated by depstubber.
package sqlx
import (
"context"
"database/sql"
context "context"
sql "database/sql"
)
type ColScanner interface {
Columns() ([]string, error)
Scan(dest ...interface{}) error
Err() error
type Conn struct {
*sql.Conn
Mapper interface{}
}
type Execer interface {
Exec(query string, args ...interface{}) (sql.Result, error)
func (_ Conn) BeginTx(_ context.Context, _ *sql.TxOptions) (*sql.Tx, error) {
return nil, nil
}
type ExecerContext interface {
ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error)
func (_ Conn) Close() error {
return nil
}
func (_ Conn) ExecContext(_ context.Context, _ string, _ ...interface{}) (sql.Result, error) {
return nil, nil
}
func (_ Conn) PingContext(_ context.Context) error {
return nil
}
func (_ Conn) PrepareContext(_ context.Context, _ string) (*sql.Stmt, error) {
return nil, nil
}
func (_ Conn) QueryContext(_ context.Context, _ string, _ ...interface{}) (*sql.Rows, error) {
return nil, nil
}
func (_ Conn) QueryRowContext(_ context.Context, _ string, _ ...interface{}) *sql.Row {
return nil
}
func (_ Conn) Raw(_ func(interface{}) error) error {
return nil
}
func (_ *Conn) BeginTxx(_ context.Context, _ *sql.TxOptions) (*Tx, error) {
return nil, nil
}
func (_ *Conn) GetContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *Conn) PreparexContext(_ context.Context, _ string) (*Stmt, error) {
return nil, nil
}
func (_ *Conn) QueryRowxContext(_ context.Context, _ string, _ ...interface{}) *Row {
return nil
}
func (_ *Conn) QueryxContext(_ context.Context, _ string, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Conn) Rebind(_ string) string {
return ""
}
func (_ *Conn) SelectContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
type DB struct {
*sql.DB
Mapper interface{}
}
func (_ *DB) BeginTxx(_ context.Context, _ *sql.TxOptions) (*Tx, error) {
return nil, nil
}
func (_ *DB) Beginx() (*Tx, error) {
return nil, nil
}
func (_ *DB) BindNamed(_ string, _ interface{}) (string, []interface{}, error) {
return "", nil, nil
}
func (_ *DB) Connx(_ context.Context) (*Conn, error) {
return nil, nil
}
func (_ *DB) DriverName() string {
return ""
}
func (_ *DB) Get(_ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *DB) GetContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *DB) MapperFunc(_ func(string) string) {}
func (_ *DB) MustBegin() *Tx {
return nil
}
func (_ *DB) MustBeginTx(_ context.Context, _ *sql.TxOptions) *Tx {
return nil
}
func (_ *DB) MustExec(_ string, _ ...interface{}) sql.Result {
return nil
}
func (_ *DB) MustExecContext(_ context.Context, _ string, _ ...interface{}) sql.Result {
return nil
}
func (_ *DB) NamedExec(_ string, _ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *DB) NamedExecContext(_ context.Context, _ string, _ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *DB) NamedQuery(_ string, _ interface{}) (*Rows, error) {
return nil, nil
}
func (_ *DB) NamedQueryContext(_ context.Context, _ string, _ interface{}) (*Rows, error) {
return nil, nil
}
func (_ *DB) PrepareNamed(_ string) (*NamedStmt, error) {
return nil, nil
}
func (_ *DB) PrepareNamedContext(_ context.Context, _ string) (*NamedStmt, error) {
return nil, nil
}
func (_ *DB) Preparex(_ string) (*Stmt, error) {
return nil, nil
}
func (_ *DB) PreparexContext(_ context.Context, _ string) (*Stmt, error) {
return nil, nil
}
func (_ *DB) QueryRowx(_ string, _ ...interface{}) *Row {
return nil
}
func (_ *DB) QueryRowxContext(_ context.Context, _ string, _ ...interface{}) *Row {
return nil
}
func (_ *DB) Queryx(_ string, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *DB) QueryxContext(_ context.Context, _ string, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *DB) Rebind(_ string) string {
return ""
}
func (_ *DB) Select(_ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *DB) SelectContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *DB) Unsafe() *DB {
return nil
}
type Ext interface {
Queryer
Execer
BindNamed(_ string, _ interface{}) (string, []interface{}, error)
DriverName() string
Exec(_ string, _ ...interface{}) (sql.Result, error)
Query(_ string, _ ...interface{}) (*sql.Rows, error)
QueryRowx(_ string, _ ...interface{}) *Row
Queryx(_ string, _ ...interface{}) (*Rows, error)
Rebind(_ string) string
}
type ExtContext interface {
QueryerContext
ExecerContext
// contains filtered or unexported methods
BindNamed(_ string, _ interface{}) (string, []interface{}, error)
DriverName() string
ExecContext(_ context.Context, _ string, _ ...interface{}) (sql.Result, error)
QueryContext(_ context.Context, _ string, _ ...interface{}) (*sql.Rows, error)
QueryRowxContext(_ context.Context, _ string, _ ...interface{}) *Row
QueryxContext(_ context.Context, _ string, _ ...interface{}) (*Rows, error)
Rebind(_ string) string
}
func Get(_ Queryer, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func GetContext(_ context.Context, _ QueryerContext, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func NamedQuery(_ Ext, _ string, _ interface{}) (*Rows, error) {
return nil, nil
}
func NamedQueryContext(_ context.Context, _ ExtContext, _ string, _ interface{}) (*Rows, error) {
return nil, nil
}
type NamedStmt struct {
Params []string
QueryString string
Stmt *Stmt
}
func (_ *NamedStmt) Close() error {
return nil
}
func (_ *NamedStmt) Exec(_ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *NamedStmt) ExecContext(_ context.Context, _ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *NamedStmt) Get(_ interface{}, _ interface{}) error {
return nil
}
func (_ *NamedStmt) GetContext(_ context.Context, _ interface{}, _ interface{}) error {
return nil
}
func (_ *NamedStmt) MustExec(_ interface{}) sql.Result {
return nil
}
func (_ *NamedStmt) MustExecContext(_ context.Context, _ interface{}) sql.Result {
return nil
}
func (_ *NamedStmt) Query(_ interface{}) (*sql.Rows, error) {
return nil, nil
}
func (_ *NamedStmt) QueryContext(_ context.Context, _ interface{}) (*sql.Rows, error) {
return nil, nil
}
func (_ *NamedStmt) QueryRow(_ interface{}) *Row {
return nil
}
func (_ *NamedStmt) QueryRowContext(_ context.Context, _ interface{}) *Row {
return nil
}
func (_ *NamedStmt) QueryRowx(_ interface{}) *Row {
return nil
}
func (_ *NamedStmt) QueryRowxContext(_ context.Context, _ interface{}) *Row {
return nil
}
func (_ *NamedStmt) Queryx(_ interface{}) (*Rows, error) {
return nil, nil
}
func (_ *NamedStmt) QueryxContext(_ context.Context, _ interface{}) (*Rows, error) {
return nil, nil
}
func (_ *NamedStmt) Select(_ interface{}, _ interface{}) error {
return nil
}
func (_ *NamedStmt) SelectContext(_ context.Context, _ interface{}, _ interface{}) error {
return nil
}
func (_ *NamedStmt) Unsafe() *NamedStmt {
return nil
}
type Queryer interface {
Query(query string, args ...interface{}) (*sql.Rows, error)
Queryx(query string, args ...interface{}) (*Rows, error)
QueryRowx(query string, args ...interface{}) *Row
Query(_ string, _ ...interface{}) (*sql.Rows, error)
QueryRowx(_ string, _ ...interface{}) *Row
Queryx(_ string, _ ...interface{}) (*Rows, error)
}
type QueryerContext interface {
QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error)
QueryxContext(ctx context.Context, query string, args ...interface{}) (*Rows, error)
QueryRowxContext(ctx context.Context, query string, args ...interface{}) *Row
QueryContext(_ context.Context, _ string, _ ...interface{}) (*sql.Rows, error)
QueryRowxContext(_ context.Context, _ string, _ ...interface{}) *Row
QueryxContext(_ context.Context, _ string, _ ...interface{}) (*Rows, error)
}
func NamedQuery(e Ext, query string, arg interface{}) (*Rows, error) {
return e.Queryx(query, arg)
type Row struct {
Mapper interface{}
}
func NamedQueryContext(ctx context.Context, e ExtContext, query string, arg interface{}) (*Rows, error) {
return e.QueryxContext(ctx, query, arg)
func (_ *Row) ColumnTypes() ([]*sql.ColumnType, error) {
return nil, nil
}
func Get(q Queryer, dest interface{}, query string, args ...interface{}) error {
func (_ *Row) Columns() ([]string, error) {
return nil, nil
}
func (_ *Row) Err() error {
return nil
}
func GetContext(ctx context.Context, q QueryerContext, dest interface{}, query string, args ...interface{}) error {
func (_ *Row) MapScan(_ map[string]interface{}) error {
return nil
}
func Select(q Queryer, dest interface{}, query string, args ...interface{}) error {
func (_ *Row) Scan(_ ...interface{}) error {
return nil
}
func SelectContext(ctx context.Context, q QueryerContext, dest interface{}, query string, args ...interface{}) error {
func (_ *Row) SliceScan() ([]interface{}, error) {
return nil, nil
}
func (_ *Row) StructScan(_ interface{}) error {
return nil
}
type Rows struct {
*sql.Rows
Mapper interface{}
}
func (_ *Rows) MapScan(_ map[string]interface{}) error {
return nil
}
func (_ *Rows) SliceScan() ([]interface{}, error) {
return nil, nil
}
func (_ *Rows) StructScan(_ interface{}) error {
return nil
}
func Select(_ Queryer, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func SelectContext(_ context.Context, _ QueryerContext, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
type Stmt struct {
*sql.Stmt
Mapper interface{}
}
func (_ *Stmt) Get(_ interface{}, _ ...interface{}) error {
return nil
}
func (_ *Stmt) GetContext(_ context.Context, _ interface{}, _ ...interface{}) error {
return nil
}
func (_ *Stmt) MustExec(_ ...interface{}) sql.Result {
return nil
}
func (_ *Stmt) MustExecContext(_ context.Context, _ ...interface{}) sql.Result {
return nil
}
func (_ *Stmt) QueryRowx(_ ...interface{}) *Row {
return nil
}
func (_ *Stmt) QueryRowxContext(_ context.Context, _ ...interface{}) *Row {
return nil
}
func (_ *Stmt) Queryx(_ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Stmt) QueryxContext(_ context.Context, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Stmt) Select(_ interface{}, _ ...interface{}) error {
return nil
}
func (_ *Stmt) SelectContext(_ context.Context, _ interface{}, _ ...interface{}) error {
return nil
}
func (_ *Stmt) Unsafe() *Stmt {
return nil
}
type Tx struct {
*sql.Tx
Mapper interface{}
}
func (_ *Tx) BindNamed(_ string, _ interface{}) (string, []interface{}, error) {
return "", nil, nil
}
func (_ *Tx) DriverName() string {
return ""
}
func (_ *Tx) Get(_ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *Tx) GetContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *Tx) MustExec(_ string, _ ...interface{}) sql.Result {
return nil
}
func (_ *Tx) MustExecContext(_ context.Context, _ string, _ ...interface{}) sql.Result {
return nil
}
func (_ *Tx) NamedExec(_ string, _ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *Tx) NamedExecContext(_ context.Context, _ string, _ interface{}) (sql.Result, error) {
return nil, nil
}
func (_ *Tx) NamedQuery(_ string, _ interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Tx) NamedStmt(_ *NamedStmt) *NamedStmt {
return nil
}
func (_ *Tx) NamedStmtContext(_ context.Context, _ *NamedStmt) *NamedStmt {
return nil
}
func (_ *Tx) PrepareNamed(_ string) (*NamedStmt, error) {
return nil, nil
}
func (_ *Tx) PrepareNamedContext(_ context.Context, _ string) (*NamedStmt, error) {
return nil, nil
}
func (_ *Tx) Preparex(_ string) (*Stmt, error) {
return nil, nil
}
func (_ *Tx) PreparexContext(_ context.Context, _ string) (*Stmt, error) {
return nil, nil
}
func (_ *Tx) QueryRowx(_ string, _ ...interface{}) *Row {
return nil
}
func (_ *Tx) QueryRowxContext(_ context.Context, _ string, _ ...interface{}) *Row {
return nil
}
func (_ *Tx) Queryx(_ string, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Tx) QueryxContext(_ context.Context, _ string, _ ...interface{}) (*Rows, error) {
return nil, nil
}
func (_ *Tx) Rebind(_ string) string {
return ""
}
func (_ *Tx) Select(_ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *Tx) SelectContext(_ context.Context, _ interface{}, _ string, _ ...interface{}) error {
return nil
}
func (_ *Tx) Stmtx(_ interface{}) *Stmt {
return nil
}
func (_ *Tx) StmtxContext(_ context.Context, _ interface{}) *Stmt {
return nil
}
func (_ *Tx) Unsafe() *Tx {
return nil
}

View File

@@ -1,47 +0,0 @@
package sqlx
import (
"context"
"database/sql"
)
type Tx struct {
*sql.Tx
}
func (tx *Tx) Get(dest interface{}, args ...interface{}) error {
return nil
}
func (tx *Tx) GetContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}
func (tx *Tx) QueryRowx(args ...interface{}) *Row {
return nil
}
func (tx *Tx) QueryRowxContext(ctx context.Context, args ...interface{}) *Row {
return nil
}
func (tx *Tx) Queryx(args ...interface{}) (*Rows, error) {
return nil, nil
}
func (tx *Tx) QueryxContext(ctx context.Context, args ...interface{}) (*Rows, error) {
return nil, nil
}
func (tx *Tx) Select(dest interface{}, args ...interface{}) error {
return nil
}
func (tx *Tx) SelectContext(ctx context.Context, dest interface{}, args ...interface{}) error {
return nil
}
func (tx *Tx) NamedQuery(query string, arg interface{}) (*Rows, error) {
return nil, nil
}

View File

@@ -1,77 +1,878 @@
// Code generated by depstubber. DO NOT EDIT.
// This is a simple stub for gorm.io/gorm, strictly for use in testing.
// See the LICENSE file for information about the licensing of the original library.
// Source: gorm.io/gorm (exports: Association,ConnPool,DB; functions: )
// Package gorm is a stub of gorm.io/gorm, generated by depstubber.
package gorm
import (
"context"
"database/sql"
context "context"
sql "database/sql"
reflect "reflect"
strings "strings"
sync "sync"
time "time"
)
type DB struct{}
func (db *DB) Find(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) FindInBatches(dest interface{}, batchSize int, fc func(tx *DB, batch int) error) *DB {
return db
}
func (db *DB) FirstOrCreate(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) FirstOrInit(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) First(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) Model(value interface{}) *DB {
return db
}
func (db *DB) Last(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) Pluck(column string, dest interface{}) *DB {
return db
}
func (db *DB) Take(dest interface{}, conds ...interface{}) *DB {
return db
}
func (db *DB) Scan(dest interface{}) *DB {
return db
}
func (db *DB) ScanRows(rows *sql.Rows, result interface{}) error {
return nil
}
func (db *DB) Row() *sql.Row {
return nil
}
func (db *DB) Rows() (*sql.Rows, error) {
return nil, nil
}
type Association struct {
DB *DB
DB *DB
Relationship interface{}
Unscope bool
Error error
}
func (a *Association) Find(dest interface{}) *Association {
return a
func (_ *Association) Append(_ ...interface{}) error {
return nil
}
func (_ *Association) Clear() error {
return nil
}
func (_ *Association) Count() int64 {
return 0
}
func (_ *Association) Delete(_ ...interface{}) error {
return nil
}
func (_ *Association) Find(_ interface{}, _ ...interface{}) error {
return nil
}
func (_ *Association) Replace(_ ...interface{}) error {
return nil
}
func (_ *Association) Unscoped() *Association {
return nil
}
type ColumnType interface {
AutoIncrement() (bool, bool)
ColumnType() (string, bool)
Comment() (string, bool)
DatabaseTypeName() string
DecimalSize() (int64, int64, bool)
DefaultValue() (string, bool)
Length() (int64, bool)
Name() string
Nullable() (bool, bool)
PrimaryKey() (bool, bool)
ScanType() reflect.Type
Unique() (bool, bool)
}
type Config struct {
SkipDefaultTransaction bool
NamingStrategy interface{}
FullSaveAssociations bool
Logger interface{}
NowFunc func() time.Time
DryRun bool
PrepareStmt bool
DisableAutomaticPing bool
DisableForeignKeyConstraintWhenMigrating bool
IgnoreRelationshipsWhenMigrating bool
DisableNestedTransaction bool
AllowGlobalUpdate bool
QueryFields bool
CreateBatchSize int
TranslateError bool
PropagateUnscoped bool
ClauseBuilders map[string]interface{}
ConnPool ConnPool
Dialector
Plugins map[string]Plugin
}
func (_ Config) BindVarTo(_ interface{}, _ *Statement, _ interface{}) {}
func (_ Config) DataTypeOf(_ interface{}) string {
return ""
}
func (_ Config) DefaultValueOf(_ interface{}) interface{} {
return nil
}
func (_ Config) Explain(_ string, _ ...interface{}) string {
return ""
}
func (_ Config) Initialize(_ *DB) error {
return nil
}
func (_ Config) Migrator(_ *DB) Migrator {
return nil
}
func (_ Config) Name() string {
return ""
}
func (_ Config) QuoteTo(_ interface{}, _ string) {}
func (_ *Config) AfterInitialize(_ *DB) error {
return nil
}
func (_ *Config) Apply(_ *Config) error {
return nil
}
type ConnPool interface {
PrepareContext(ctx context.Context, query string) (*sql.Stmt, error)
ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error)
QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error)
QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row
ExecContext(_ context.Context, _ string, _ ...interface{}) (sql.Result, error)
PrepareContext(_ context.Context, _ string) (*sql.Stmt, error)
QueryContext(_ context.Context, _ string, _ ...interface{}) (*sql.Rows, error)
QueryRowContext(_ context.Context, _ string, _ ...interface{}) *sql.Row
}
type Model interface{}
type DB struct {
*Config
Error error
RowsAffected int64
Statement *Statement
}
func (_ DB) AfterInitialize(_ *DB) error {
return nil
}
func (_ DB) Apply(_ *Config) error {
return nil
}
func (_ DB) BindVarTo(_ interface{}, _ *Statement, _ interface{}) {}
func (_ DB) DataTypeOf(_ interface{}) string {
return ""
}
func (_ DB) DefaultValueOf(_ interface{}) interface{} {
return nil
}
func (_ DB) Explain(_ string, _ ...interface{}) string {
return ""
}
func (_ DB) Initialize(_ *DB) error {
return nil
}
func (_ DB) Name() string {
return ""
}
func (_ DB) QuoteTo(_ interface{}, _ string) {}
func (_ *DB) AddError(_ error) error {
return nil
}
func (_ *DB) Assign(_ ...interface{}) *DB {
return nil
}
func (_ *DB) Association(_ string) *Association {
return nil
}
func (_ *DB) Attrs(_ ...interface{}) *DB {
return nil
}
func (_ *DB) AutoMigrate(_ ...interface{}) error {
return nil
}
func (_ *DB) Begin(_ ...*sql.TxOptions) *DB {
return nil
}
func (_ *DB) Callback() interface{} {
return nil
}
func (_ *DB) Clauses(_ ...interface{}) *DB {
return nil
}
func (_ *DB) Commit() *DB {
return nil
}
func (_ *DB) Connection(_ func(*DB) error) error {
return nil
}
func (_ *DB) Count(_ *int64) *DB {
return nil
}
func (_ *DB) Create(_ interface{}) *DB {
return nil
}
func (_ *DB) CreateInBatches(_ interface{}, _ int) *DB {
return nil
}
func (_ *DB) DB() (*sql.DB, error) {
return nil, nil
}
func (_ *DB) Debug() *DB {
return nil
}
func (_ *DB) Delete(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Distinct(_ ...interface{}) *DB {
return nil
}
func (_ *DB) Exec(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Find(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) FindInBatches(_ interface{}, _ int, _ func(*DB, int) error) *DB {
return nil
}
func (_ *DB) First(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) FirstOrCreate(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) FirstOrInit(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Get(_ string) (interface{}, bool) {
return nil, false
}
func (_ *DB) Group(_ string) *DB {
return nil
}
func (_ *DB) Having(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) InnerJoins(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) InstanceGet(_ string) (interface{}, bool) {
return nil, false
}
func (_ *DB) InstanceSet(_ string, _ interface{}) *DB {
return nil
}
func (_ *DB) Joins(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Last(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Limit(_ int) *DB {
return nil
}
func (_ *DB) MapColumns(_ map[string]string) *DB {
return nil
}
func (_ *DB) Migrator() Migrator {
return nil
}
func (_ *DB) Model(_ interface{}) *DB {
return nil
}
func (_ *DB) Not(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Offset(_ int) *DB {
return nil
}
func (_ *DB) Omit(_ ...string) *DB {
return nil
}
func (_ *DB) Or(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Order(_ interface{}) *DB {
return nil
}
func (_ *DB) Pluck(_ string, _ interface{}) *DB {
return nil
}
func (_ *DB) Preload(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Raw(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Rollback() *DB {
return nil
}
func (_ *DB) RollbackTo(_ string) *DB {
return nil
}
func (_ *DB) Row() *sql.Row {
return nil
}
func (_ *DB) Rows() (*sql.Rows, error) {
return nil, nil
}
func (_ *DB) Save(_ interface{}) *DB {
return nil
}
func (_ *DB) SavePoint(_ string) *DB {
return nil
}
func (_ *DB) Scan(_ interface{}) *DB {
return nil
}
func (_ *DB) ScanRows(_ *sql.Rows, _ interface{}) error {
return nil
}
func (_ *DB) Scopes(_ ...func(*DB) *DB) *DB {
return nil
}
func (_ *DB) Select(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Session(_ *Session) *DB {
return nil
}
func (_ *DB) Set(_ string, _ interface{}) *DB {
return nil
}
func (_ *DB) SetupJoinTable(_ interface{}, _ string, _ interface{}) error {
return nil
}
func (_ *DB) Table(_ string, _ ...interface{}) *DB {
return nil
}
func (_ *DB) Take(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) ToSQL(_ func(*DB) *DB) string {
return ""
}
func (_ *DB) Transaction(_ func(*DB) error, _ ...*sql.TxOptions) error {
return nil
}
func (_ *DB) Unscoped() *DB {
return nil
}
func (_ *DB) Update(_ string, _ interface{}) *DB {
return nil
}
func (_ *DB) UpdateColumn(_ string, _ interface{}) *DB {
return nil
}
func (_ *DB) UpdateColumns(_ interface{}) *DB {
return nil
}
func (_ *DB) Updates(_ interface{}) *DB {
return nil
}
func (_ *DB) Use(_ Plugin) error {
return nil
}
func (_ *DB) Where(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ *DB) WithContext(_ context.Context) *DB {
return nil
}
type Dialector interface {
BindVarTo(_ interface{}, _ *Statement, _ interface{})
DataTypeOf(_ interface{}) string
DefaultValueOf(_ interface{}) interface{}
Explain(_ string, _ ...interface{}) string
Initialize(_ *DB) error
Migrator(_ *DB) Migrator
Name() string
QuoteTo(_ interface{}, _ string)
}
type Index interface {
Columns() []string
Name() string
Option() string
PrimaryKey() (bool, bool)
Table() string
Unique() (bool, bool)
}
type Migrator interface {
AddColumn(_ interface{}, _ string) error
AlterColumn(_ interface{}, _ string) error
AutoMigrate(_ ...interface{}) error
ColumnTypes(_ interface{}) ([]ColumnType, error)
CreateConstraint(_ interface{}, _ string) error
CreateIndex(_ interface{}, _ string) error
CreateTable(_ ...interface{}) error
CreateView(_ string, _ ViewOption) error
CurrentDatabase() string
DropColumn(_ interface{}, _ string) error
DropConstraint(_ interface{}, _ string) error
DropIndex(_ interface{}, _ string) error
DropTable(_ ...interface{}) error
DropView(_ string) error
FullDataTypeOf(_ interface{}) interface{}
GetIndexes(_ interface{}) ([]Index, error)
GetTables() ([]string, error)
GetTypeAliases(_ string) []string
HasColumn(_ interface{}, _ string) bool
HasConstraint(_ interface{}, _ string) bool
HasIndex(_ interface{}, _ string) bool
HasTable(_ interface{}) bool
MigrateColumn(_ interface{}, _ interface{}, _ ColumnType) error
MigrateColumnUnique(_ interface{}, _ interface{}, _ ColumnType) error
RenameColumn(_ interface{}, _ string, _ string) error
RenameIndex(_ interface{}, _ string, _ string) error
RenameTable(_ interface{}, _ interface{}) error
TableType(_ interface{}) (TableType, error)
}
type Plugin interface {
Initialize(_ *DB) error
Name() string
}
type Session struct {
DryRun bool
PrepareStmt bool
NewDB bool
Initialized bool
SkipHooks bool
SkipDefaultTransaction bool
DisableNestedTransaction bool
AllowGlobalUpdate bool
FullSaveAssociations bool
PropagateUnscoped bool
QueryFields bool
Context context.Context
Logger interface{}
NowFunc func() time.Time
CreateBatchSize int
}
type Statement struct {
*DB
TableExpr interface{}
Table string
Model interface{}
Unscoped bool
Dest interface{}
ReflectValue reflect.Value
Clauses map[string]interface{}
BuildClauses []string
Distinct bool
Selects []string
Omits []string
ColumnMapping map[string]string
Joins []interface{}
Preloads map[string][]interface{}
Settings sync.Map
ConnPool ConnPool
Schema interface{}
Context context.Context
RaiseErrorOnNotFound bool
SkipHooks bool
SQL strings.Builder
Vars []interface{}
CurDestIndex int
}
func (_ Statement) AddError(_ error) error {
return nil
}
func (_ Statement) AfterInitialize(_ *DB) error {
return nil
}
func (_ Statement) Apply(_ *Config) error {
return nil
}
func (_ Statement) Assign(_ ...interface{}) *DB {
return nil
}
func (_ Statement) Association(_ string) *Association {
return nil
}
func (_ Statement) Attrs(_ ...interface{}) *DB {
return nil
}
func (_ Statement) AutoMigrate(_ ...interface{}) error {
return nil
}
func (_ Statement) Begin(_ ...*sql.TxOptions) *DB {
return nil
}
func (_ Statement) BindVarTo(_ interface{}, _ *Statement, _ interface{}) {}
func (_ Statement) Callback() interface{} {
return nil
}
func (_ Statement) Commit() *DB {
return nil
}
func (_ Statement) Connection(_ func(*DB) error) error {
return nil
}
func (_ Statement) Count(_ *int64) *DB {
return nil
}
func (_ Statement) Create(_ interface{}) *DB {
return nil
}
func (_ Statement) CreateInBatches(_ interface{}, _ int) *DB {
return nil
}
func (_ Statement) DataTypeOf(_ interface{}) string {
return ""
}
func (_ Statement) Debug() *DB {
return nil
}
func (_ Statement) DefaultValueOf(_ interface{}) interface{} {
return nil
}
func (_ Statement) Delete(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Exec(_ string, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Explain(_ string, _ ...interface{}) string {
return ""
}
func (_ Statement) Find(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) FindInBatches(_ interface{}, _ int, _ func(*DB, int) error) *DB {
return nil
}
func (_ Statement) First(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) FirstOrCreate(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) FirstOrInit(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Get(_ string) (interface{}, bool) {
return nil, false
}
func (_ Statement) Group(_ string) *DB {
return nil
}
func (_ Statement) Having(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Initialize(_ *DB) error {
return nil
}
func (_ Statement) InnerJoins(_ string, _ ...interface{}) *DB {
return nil
}
func (_ Statement) InstanceGet(_ string) (interface{}, bool) {
return nil, false
}
func (_ Statement) InstanceSet(_ string, _ interface{}) *DB {
return nil
}
func (_ Statement) Last(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Limit(_ int) *DB {
return nil
}
func (_ Statement) MapColumns(_ map[string]string) *DB {
return nil
}
func (_ Statement) Migrator() Migrator {
return nil
}
func (_ Statement) Name() string {
return ""
}
func (_ Statement) Not(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Offset(_ int) *DB {
return nil
}
func (_ Statement) Omit(_ ...string) *DB {
return nil
}
func (_ Statement) Or(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Order(_ interface{}) *DB {
return nil
}
func (_ Statement) Pluck(_ string, _ interface{}) *DB {
return nil
}
func (_ Statement) Preload(_ string, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Raw(_ string, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Rollback() *DB {
return nil
}
func (_ Statement) RollbackTo(_ string) *DB {
return nil
}
func (_ Statement) Row() *sql.Row {
return nil
}
func (_ Statement) Rows() (*sql.Rows, error) {
return nil, nil
}
func (_ Statement) Save(_ interface{}) *DB {
return nil
}
func (_ Statement) SavePoint(_ string) *DB {
return nil
}
func (_ Statement) Scan(_ interface{}) *DB {
return nil
}
func (_ Statement) ScanRows(_ *sql.Rows, _ interface{}) error {
return nil
}
func (_ Statement) Scopes(_ ...func(*DB) *DB) *DB {
return nil
}
func (_ Statement) Select(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) Session(_ *Session) *DB {
return nil
}
func (_ Statement) Set(_ string, _ interface{}) *DB {
return nil
}
func (_ Statement) SetupJoinTable(_ interface{}, _ string, _ interface{}) error {
return nil
}
func (_ Statement) Take(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) ToSQL(_ func(*DB) *DB) string {
return ""
}
func (_ Statement) Transaction(_ func(*DB) error, _ ...*sql.TxOptions) error {
return nil
}
func (_ Statement) Update(_ string, _ interface{}) *DB {
return nil
}
func (_ Statement) UpdateColumn(_ string, _ interface{}) *DB {
return nil
}
func (_ Statement) UpdateColumns(_ interface{}) *DB {
return nil
}
func (_ Statement) Updates(_ interface{}) *DB {
return nil
}
func (_ Statement) Use(_ Plugin) error {
return nil
}
func (_ Statement) Where(_ interface{}, _ ...interface{}) *DB {
return nil
}
func (_ Statement) WithContext(_ context.Context) *DB {
return nil
}
func (_ *Statement) AddClause(_ interface{}) {}
func (_ *Statement) AddClauseIfNotExists(_ interface{}) {}
func (_ *Statement) AddVar(_ interface{}, _ ...interface{}) {}
func (_ *Statement) Build(_ ...string) {}
func (_ *Statement) BuildCondition(_ interface{}, _ ...interface{}) []interface{} {
return nil
}
func (_ *Statement) Changed(_ ...string) bool {
return false
}
func (_ *Statement) Parse(_ interface{}) error {
return nil
}
func (_ *Statement) ParseWithSpecialTableName(_ interface{}, _ string) error {
return nil
}
func (_ *Statement) Quote(_ interface{}) string {
return ""
}
func (_ *Statement) QuoteTo(_ interface{}, _ interface{}) {}
func (_ *Statement) SelectAndOmitColumns(_ bool, _ bool) (map[string]bool, bool) {
return nil, false
}
func (_ *Statement) SetColumn(_ string, _ interface{}, _ ...bool) {}
func (_ *Statement) WriteByte(_ byte) error {
return nil
}
func (_ *Statement) WriteQuoted(_ interface{}) {}
func (_ *Statement) WriteString(_ string) (int, error) {
return 0, nil
}
type TableType interface {
Comment() (string, bool)
Name() string
Schema() string
Type() string
}
type ViewOption struct {
Replace bool
CheckOption string
Query *DB
}

View File

@@ -1,9 +1,144 @@
# gorm.io/gorm v1.23.0
# github.com/astaxie/beego v1.12.3
## explicit
gorm.io/gorm
github.com/astaxie/beego/orm
# github.com/beego/beego/v2 v2.3.5
## explicit
github.com/beego/beego/v2/client/orm
# github.com/couchbase/gocb v1.6.7
## explicit
github.com/couchbase/gocb
# github.com/couchbase/gocb/v2 v2.9.4
## explicit
github.com/couchbase/gocb/v2
# github.com/jmoiron/sqlx v1.4.0
## explicit
github.com/jmoiron/sqlx
# go.mongodb.org/mongo-driver/mongo v1.17.2
# github.com/rqlite/gorqlite v0.0.0-20250128004930-114c7828b55a
## explicit
github.com/rqlite/gorqlite
# go.mongodb.org/mongo-driver v1.17.3
## explicit
go.mongodb.org/mongo-driver/mongo
# gorm.io/gorm v1.25.12
## explicit
gorm.io/gorm
# github.com/couchbase/gocbcore/v10 v10.5.4
## explicit
github.com/couchbase/gocbcore/v10
# github.com/couchbase/gocbcoreps v0.1.3
## explicit
github.com/couchbase/gocbcoreps
# github.com/couchbase/goprotostellar v1.0.2
## explicit
github.com/couchbase/goprotostellar
# github.com/couchbaselabs/gocbconnstr/v2 v2.0.0-20240607131231-fb385523de28
## explicit
github.com/couchbaselabs/gocbconnstr/v2
# github.com/go-logr/logr v1.4.1
## explicit
github.com/go-logr/logr
# github.com/go-logr/stdr v1.2.2
## explicit
github.com/go-logr/stdr
# github.com/golang/snappy v0.0.4
## explicit
github.com/golang/snappy
# github.com/google/uuid v1.6.0
## explicit
github.com/google/uuid
# github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
## explicit
github.com/grpc-ecosystem/go-grpc-middleware
# github.com/hashicorp/golang-lru v0.5.4
## explicit
github.com/hashicorp/golang-lru
# github.com/jinzhu/inflection v1.0.0
## explicit
github.com/jinzhu/inflection
# github.com/jinzhu/now v1.1.5
## explicit
github.com/jinzhu/now
# github.com/klauspost/compress v1.16.7
## explicit
github.com/klauspost/compress
# github.com/montanaflynn/stats v0.7.1
## explicit
github.com/montanaflynn/stats
# github.com/opentracing/opentracing-go v1.2.0
## explicit
github.com/opentracing/opentracing-go
# github.com/pkg/errors v0.9.1
## explicit
github.com/pkg/errors
# github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18
## explicit
github.com/shiena/ansicolor
# github.com/valyala/bytebufferpool v1.0.0
## explicit
github.com/valyala/bytebufferpool
# github.com/xdg-go/pbkdf2 v1.0.0
## explicit
github.com/xdg-go/pbkdf2
# github.com/xdg-go/scram v1.1.2
## explicit
github.com/xdg-go/scram
# github.com/xdg-go/stringprep v1.0.4
## explicit
github.com/xdg-go/stringprep
# github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78
## explicit
github.com/youmark/pkcs8
# go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
## explicit
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc
# go.opentelemetry.io/otel v1.24.0
## explicit
go.opentelemetry.io/otel
# go.opentelemetry.io/otel/metric v1.24.0
## explicit
go.opentelemetry.io/otel/metric
# go.opentelemetry.io/otel/trace v1.24.0
## explicit
go.opentelemetry.io/otel/trace
# go.uber.org/multierr v1.11.0
## explicit
go.uber.org/multierr
# go.uber.org/zap v1.27.0
## explicit
go.uber.org/zap
# golang.org/x/crypto v0.26.0
## explicit
golang.org/x/crypto
# golang.org/x/net v0.24.0
## explicit
golang.org/x/net
# golang.org/x/sync v0.8.0
## explicit
golang.org/x/sync
# golang.org/x/sys v0.23.0
## explicit
golang.org/x/sys
# golang.org/x/text v0.17.0
## explicit
golang.org/x/text
# google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
## explicit
google.golang.org/genproto/googleapis/rpc
# google.golang.org/grpc v1.63.2
## explicit
google.golang.org/grpc
# google.golang.org/protobuf v1.34.2
## explicit
google.golang.org/protobuf
# gopkg.in/couchbase/gocbcore.v7 v7.1.18
## explicit
gopkg.in/couchbase/gocbcore.v7
# gopkg.in/couchbaselabs/gocbconnstr.v1 v1.0.4
## explicit
gopkg.in/couchbaselabs/gocbconnstr.v1
# gopkg.in/couchbaselabs/gojcbmock.v1 v1.0.4
## explicit
gopkg.in/couchbaselabs/gojcbmock.v1
# gopkg.in/couchbaselabs/jsonx.v1 v1.0.1
## explicit
gopkg.in/couchbaselabs/jsonx.v1

View File

@@ -30,52 +30,54 @@ import (
func handler(req *http.Request, ctx *goproxy.ProxyCtx) {
username := req.URL.Query()["username"][0]
slice := []any{"username", username}
password := req.URL.Query()["password"][0]
formatString := req.URL.Query()["formatString"][0]
testFlag := req.URL.Query()["testFlag"][0]
slice := []any{"username", username}
{
fmt.Print(username) // $ hasTaintFlow="username"
fmt.Printf(username) // $ hasTaintFlow="username"
fmt.Println(username) // $ hasTaintFlow="username"
fmt.Fprint(nil, username) // Fprint functions are only loggers if they target stdout/stderr
fmt.Fprintf(nil, username)
fmt.Fprintln(nil, username)
fmt.Print(username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
fmt.Printf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
fmt.Println(username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
fmt.Fprint(nil, username, password) // Fprint functions are only loggers if they target stdout/stderr
fmt.Fprintf(nil, formatString, username, password)
fmt.Fprintln(nil, username, password)
}
// log
{
log.Print("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Printf("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Println("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Print("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
log.Printf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
log.Println("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
if testFlag == "true" {
log.Fatal("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Fatal("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
}
if testFlag == "true" {
log.Fatalf("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Fatalf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
}
if testFlag == "true" {
log.Fatalln("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Fatalln("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
}
if testFlag == "true" {
log.Panic("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Panic("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
}
if testFlag == "true" {
log.Panicf("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Panicf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
}
if testFlag == "true" {
log.Panicln("user %s logged in.\n", username) // $ hasTaintFlow="username"
log.Panicln("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
}
logger := log.Default()
logger.Print("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Printf("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Println("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Fatal("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Fatalf("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Fatalln("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Panic("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Panicf("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Panicln("user %s logged in.\n", username) // $ hasTaintFlow="username"
logger.Print("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
logger.Printf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
logger.Println("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
logger.Fatal("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
logger.Fatalf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
logger.Fatalln("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
logger.Panic("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
logger.Panicf(formatString, username, password) // $ hasTaintFlow="formatString" hasTaintFlow="username" hasTaintFlow="password"
logger.Panicln("user is logged in:", username, password) // $ hasTaintFlow="username" hasTaintFlow="password"
}
// k8s.io/klog
{
@@ -421,7 +423,6 @@ func handler(req *http.Request, ctx *goproxy.ProxyCtx) {
simpleLogger.Tracew("%s", username) // $ hasTaintFlow="username"
simpleLogger.Debugw("%s %s", slice...) // $ hasTaintFlow="slice"
}
}
type Logger interface {
@@ -514,8 +515,12 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
verbose.Infof("user %q logged in.\n", username)
klog.Infof("user %q logged in.\n", username)
klog.Errorf("user %q logged in.\n", username)
klog.Fatalf("user %q logged in.\n", username)
klog.Exitf("user %q logged in.\n", username)
if testFlag == "true" {
klog.Fatalf("user %q logged in.\n", username)
}
if testFlag == "true" {
klog.Exitf("user %q logged in.\n", username)
}
}
// elazarl/goproxy
{
@@ -529,16 +534,24 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
glog.Infof("user %q logged in.\n", username)
glog.Errorf("user %q logged in.\n", username)
glog.Fatalf("user %q logged in.\n", username)
glog.Exitf("user %q logged in.\n", username)
if testFlag == "true" {
glog.Fatalf("user %q logged in.\n", username)
}
if testFlag == "true" {
glog.Exitf("user %q logged in.\n", username)
}
}
// sirupsen/logrus
{
logrus.Debugf("user %q logged in.\n", username)
logrus.Errorf("user %q logged in.\n", username)
logrus.Fatalf("user %q logged in.\n", username)
if testFlag == "true" {
logrus.Fatalf("user %q logged in.\n", username)
}
logrus.Infof("user %q logged in.\n", username)
logrus.Panicf("user %q logged in.\n", username)
if testFlag == "true" {
logrus.Panicf("user %q logged in.\n", username)
}
logrus.Printf("user %q logged in.\n", username)
logrus.Tracef("user %q logged in.\n", username)
logrus.Warnf("user %q logged in.\n", username)
@@ -548,10 +561,14 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
entry := logrus.WithFields(fields)
entry.Debugf("user %q logged in.\n", username)
entry.Errorf("user %q logged in.\n", username)
entry.Fatalf("user %q logged in.\n", username)
if testFlag == "true" {
entry.Fatalf("user %q logged in.\n", username)
}
entry.Infof("user %q logged in.\n", username)
entry.Logf(0, "user %q logged in.\n", username)
entry.Panicf("user %q logged in.\n", username)
if testFlag == "true" {
entry.Panicf("user %q logged in.\n", username)
}
entry.Printf("user %q logged in.\n", username)
entry.Tracef("user %q logged in.\n", username)
entry.Warnf("user %q logged in.\n", username)
@@ -560,10 +577,14 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
logger := entry.Logger
logger.Debugf("user %q logged in.\n", username)
logger.Errorf("user %q logged in.\n", username)
logger.Fatalf("user %q logged in.\n", username)
if testFlag == "true" {
logger.Fatalf("user %q logged in.\n", username)
}
logger.Infof("user %q logged in.\n", username)
logger.Logf(0, "user %q logged in.\n", username)
logger.Panicf("user %q logged in.\n", username)
if testFlag == "true" {
logger.Panicf("user %q logged in.\n", username)
}
logger.Printf("user %q logged in.\n", username)
logger.Tracef("user %q logged in.\n", username)
logger.Warnf("user %q logged in.\n", username)
@@ -599,8 +620,12 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
verbose.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
klog.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
klog.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
klog.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
klog.Exitf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
klog.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
if testFlag == "true" {
klog.Exitf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
}
// elazarl/goproxy
{
@@ -614,16 +639,24 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
glog.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
glog.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
glog.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
glog.Exitf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
glog.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
if testFlag == "true" {
glog.Exitf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
}
// sirupsen/logrus
{
logrus.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
logrus.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
logrus.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
logrus.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
logrus.Printf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Tracef("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logrus.Warnf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
@@ -631,24 +664,32 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
fields := make(logrus.Fields)
entry := logrus.WithFields(fields)
entry.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Logf(0, "user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
entry.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
entry.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Logf(0, "user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == "true" {
entry.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
entry.Printf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Tracef("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Warnf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
entry.Warningf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger := entry.Logger
logger.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Logf(0, "user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Debugf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Errorf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == " true" {
logger.Fatalf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
logger.Infof("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Logf(0, "user %#q logged in.\n", username) // $ hasTaintFlow="username"
if testFlag == " true" {
logger.Panicf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
logger.Printf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Tracef("user %#q logged in.\n", username) // $ hasTaintFlow="username"
logger.Warnf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
@@ -677,3 +718,9 @@ func handlerGood4(req *http.Request, ctx *goproxy.ProxyCtx) {
sLogger.Warnf("user %#q logged in.\n", username) // $ hasTaintFlow="username"
}
}
// GOOD: User-provided values formatted using a %T directive, which prints the type of the argument
func handlerGood5(req *http.Request) {
object := req.URL.Query()["username"][0]
log.Printf("found object of type %T.\n", object)
}

View File

@@ -1,35 +1,35 @@
#select
| klog.go:22:15:22:20 | header | klog.go:20:30:20:37 | selection of Header | klog.go:22:15:22:20 | header | $@ flows to a logging call. | klog.go:20:30:20:37 | selection of Header | Sensitive data returned by HTTP request headers |
| klog.go:28:13:28:41 | call to Get | klog.go:28:13:28:20 | selection of Header | klog.go:28:13:28:41 | call to Get | $@ flows to a logging call. | klog.go:28:13:28:20 | selection of Header | Sensitive data returned by HTTP request headers |
| main.go:15:12:15:19 | password | main.go:15:12:15:19 | password | main.go:15:12:15:19 | password | $@ flows to a logging call. | main.go:15:12:15:19 | password | Sensitive data returned by an access to password |
| main.go:16:17:16:24 | password | main.go:16:17:16:24 | password | main.go:16:17:16:24 | password | $@ flows to a logging call. | main.go:16:17:16:24 | password | Sensitive data returned by an access to password |
| main.go:17:13:17:20 | password | main.go:17:13:17:20 | password | main.go:17:13:17:20 | password | $@ flows to a logging call. | main.go:17:13:17:20 | password | Sensitive data returned by an access to password |
| main.go:18:14:18:21 | password | main.go:18:14:18:21 | password | main.go:18:14:18:21 | password | $@ flows to a logging call. | main.go:18:14:18:21 | password | Sensitive data returned by an access to password |
| main.go:19:12:19:19 | password | main.go:19:12:19:19 | password | main.go:19:12:19:19 | password | $@ flows to a logging call. | main.go:19:12:19:19 | password | Sensitive data returned by an access to password |
| main.go:20:17:20:24 | password | main.go:20:17:20:24 | password | main.go:20:17:20:24 | password | $@ flows to a logging call. | main.go:20:17:20:24 | password | Sensitive data returned by an access to password |
| main.go:21:13:21:20 | password | main.go:21:13:21:20 | password | main.go:21:13:21:20 | password | $@ flows to a logging call. | main.go:21:13:21:20 | password | Sensitive data returned by an access to password |
| main.go:22:14:22:21 | password | main.go:22:14:22:21 | password | main.go:22:14:22:21 | password | $@ flows to a logging call. | main.go:22:14:22:21 | password | Sensitive data returned by an access to password |
| main.go:23:12:23:19 | password | main.go:23:12:23:19 | password | main.go:23:12:23:19 | password | $@ flows to a logging call. | main.go:23:12:23:19 | password | Sensitive data returned by an access to password |
| main.go:24:17:24:24 | password | main.go:24:17:24:24 | password | main.go:24:17:24:24 | password | $@ flows to a logging call. | main.go:24:17:24:24 | password | Sensitive data returned by an access to password |
| main.go:25:13:25:20 | password | main.go:25:13:25:20 | password | main.go:25:13:25:20 | password | $@ flows to a logging call. | main.go:25:13:25:20 | password | Sensitive data returned by an access to password |
| main.go:26:14:26:21 | password | main.go:26:14:26:21 | password | main.go:26:14:26:21 | password | $@ flows to a logging call. | main.go:26:14:26:21 | password | Sensitive data returned by an access to password |
| main.go:27:16:27:23 | password | main.go:27:16:27:23 | password | main.go:27:16:27:23 | password | $@ flows to a logging call. | main.go:27:16:27:23 | password | Sensitive data returned by an access to password |
| main.go:30:10:30:17 | password | main.go:30:10:30:17 | password | main.go:30:10:30:17 | password | $@ flows to a logging call. | main.go:30:10:30:17 | password | Sensitive data returned by an access to password |
| main.go:31:15:31:22 | password | main.go:31:15:31:22 | password | main.go:31:15:31:22 | password | $@ flows to a logging call. | main.go:31:15:31:22 | password | Sensitive data returned by an access to password |
| main.go:32:11:32:18 | password | main.go:32:11:32:18 | password | main.go:32:11:32:18 | password | $@ flows to a logging call. | main.go:32:11:32:18 | password | Sensitive data returned by an access to password |
| main.go:33:12:33:19 | password | main.go:33:12:33:19 | password | main.go:33:12:33:19 | password | $@ flows to a logging call. | main.go:33:12:33:19 | password | Sensitive data returned by an access to password |
| main.go:34:10:34:17 | password | main.go:34:10:34:17 | password | main.go:34:10:34:17 | password | $@ flows to a logging call. | main.go:34:10:34:17 | password | Sensitive data returned by an access to password |
| main.go:35:15:35:22 | password | main.go:35:15:35:22 | password | main.go:35:15:35:22 | password | $@ flows to a logging call. | main.go:35:15:35:22 | password | Sensitive data returned by an access to password |
| main.go:36:11:36:18 | password | main.go:36:11:36:18 | password | main.go:36:11:36:18 | password | $@ flows to a logging call. | main.go:36:11:36:18 | password | Sensitive data returned by an access to password |
| main.go:37:12:37:19 | password | main.go:37:12:37:19 | password | main.go:37:12:37:19 | password | $@ flows to a logging call. | main.go:37:12:37:19 | password | Sensitive data returned by an access to password |
| main.go:38:10:38:17 | password | main.go:38:10:38:17 | password | main.go:38:10:38:17 | password | $@ flows to a logging call. | main.go:38:10:38:17 | password | Sensitive data returned by an access to password |
| main.go:39:15:39:22 | password | main.go:39:15:39:22 | password | main.go:39:15:39:22 | password | $@ flows to a logging call. | main.go:39:15:39:22 | password | Sensitive data returned by an access to password |
| main.go:40:11:40:18 | password | main.go:40:11:40:18 | password | main.go:40:11:40:18 | password | $@ flows to a logging call. | main.go:40:11:40:18 | password | Sensitive data returned by an access to password |
| main.go:41:12:41:19 | password | main.go:41:12:41:19 | password | main.go:41:12:41:19 | password | $@ flows to a logging call. | main.go:41:12:41:19 | password | Sensitive data returned by an access to password |
| main.go:42:14:42:21 | password | main.go:42:14:42:21 | password | main.go:42:14:42:21 | password | $@ flows to a logging call. | main.go:42:14:42:21 | password | Sensitive data returned by an access to password |
| main.go:44:12:44:19 | password | main.go:44:12:44:19 | password | main.go:44:12:44:19 | password | $@ flows to a logging call. | main.go:44:12:44:19 | password | Sensitive data returned by an access to password |
| main.go:45:17:45:24 | password | main.go:45:17:45:24 | password | main.go:45:17:45:24 | password | $@ flows to a logging call. | main.go:45:17:45:24 | password | Sensitive data returned by an access to password |
| main.go:52:35:52:42 | password | main.go:52:35:52:42 | password | main.go:52:35:52:42 | password | $@ flows to a logging call. | main.go:52:35:52:42 | password | Sensitive data returned by an access to password |
| klog.go:23:15:23:20 | header | klog.go:21:30:21:37 | selection of Header | klog.go:23:15:23:20 | header | $@ flows to a logging call. | klog.go:21:30:21:37 | selection of Header | Sensitive data returned by HTTP request headers |
| klog.go:29:13:29:41 | call to Get | klog.go:29:13:29:20 | selection of Header | klog.go:29:13:29:41 | call to Get | $@ flows to a logging call. | klog.go:29:13:29:20 | selection of Header | Sensitive data returned by HTTP request headers |
| main.go:16:12:16:19 | password | main.go:16:12:16:19 | password | main.go:16:12:16:19 | password | $@ flows to a logging call. | main.go:16:12:16:19 | password | Sensitive data returned by an access to password |
| main.go:17:19:17:26 | password | main.go:17:19:17:26 | password | main.go:17:19:17:26 | password | $@ flows to a logging call. | main.go:17:19:17:26 | password | Sensitive data returned by an access to password |
| main.go:18:13:18:20 | password | main.go:18:13:18:20 | password | main.go:18:13:18:20 | password | $@ flows to a logging call. | main.go:18:13:18:20 | password | Sensitive data returned by an access to password |
| main.go:19:14:19:21 | password | main.go:19:14:19:21 | password | main.go:19:14:19:21 | password | $@ flows to a logging call. | main.go:19:14:19:21 | password | Sensitive data returned by an access to password |
| main.go:20:12:20:19 | password | main.go:20:12:20:19 | password | main.go:20:12:20:19 | password | $@ flows to a logging call. | main.go:20:12:20:19 | password | Sensitive data returned by an access to password |
| main.go:21:19:21:26 | password | main.go:21:19:21:26 | password | main.go:21:19:21:26 | password | $@ flows to a logging call. | main.go:21:19:21:26 | password | Sensitive data returned by an access to password |
| main.go:22:13:22:20 | password | main.go:22:13:22:20 | password | main.go:22:13:22:20 | password | $@ flows to a logging call. | main.go:22:13:22:20 | password | Sensitive data returned by an access to password |
| main.go:23:14:23:21 | password | main.go:23:14:23:21 | password | main.go:23:14:23:21 | password | $@ flows to a logging call. | main.go:23:14:23:21 | password | Sensitive data returned by an access to password |
| main.go:24:12:24:19 | password | main.go:24:12:24:19 | password | main.go:24:12:24:19 | password | $@ flows to a logging call. | main.go:24:12:24:19 | password | Sensitive data returned by an access to password |
| main.go:25:19:25:26 | password | main.go:25:19:25:26 | password | main.go:25:19:25:26 | password | $@ flows to a logging call. | main.go:25:19:25:26 | password | Sensitive data returned by an access to password |
| main.go:26:13:26:20 | password | main.go:26:13:26:20 | password | main.go:26:13:26:20 | password | $@ flows to a logging call. | main.go:26:13:26:20 | password | Sensitive data returned by an access to password |
| main.go:27:14:27:21 | password | main.go:27:14:27:21 | password | main.go:27:14:27:21 | password | $@ flows to a logging call. | main.go:27:14:27:21 | password | Sensitive data returned by an access to password |
| main.go:28:16:28:23 | password | main.go:28:16:28:23 | password | main.go:28:16:28:23 | password | $@ flows to a logging call. | main.go:28:16:28:23 | password | Sensitive data returned by an access to password |
| main.go:32:10:32:17 | password | main.go:32:10:32:17 | password | main.go:32:10:32:17 | password | $@ flows to a logging call. | main.go:32:10:32:17 | password | Sensitive data returned by an access to password |
| main.go:33:17:33:24 | password | main.go:33:17:33:24 | password | main.go:33:17:33:24 | password | $@ flows to a logging call. | main.go:33:17:33:24 | password | Sensitive data returned by an access to password |
| main.go:34:11:34:18 | password | main.go:34:11:34:18 | password | main.go:34:11:34:18 | password | $@ flows to a logging call. | main.go:34:11:34:18 | password | Sensitive data returned by an access to password |
| main.go:35:12:35:19 | password | main.go:35:12:35:19 | password | main.go:35:12:35:19 | password | $@ flows to a logging call. | main.go:35:12:35:19 | password | Sensitive data returned by an access to password |
| main.go:36:10:36:17 | password | main.go:36:10:36:17 | password | main.go:36:10:36:17 | password | $@ flows to a logging call. | main.go:36:10:36:17 | password | Sensitive data returned by an access to password |
| main.go:37:17:37:24 | password | main.go:37:17:37:24 | password | main.go:37:17:37:24 | password | $@ flows to a logging call. | main.go:37:17:37:24 | password | Sensitive data returned by an access to password |
| main.go:38:11:38:18 | password | main.go:38:11:38:18 | password | main.go:38:11:38:18 | password | $@ flows to a logging call. | main.go:38:11:38:18 | password | Sensitive data returned by an access to password |
| main.go:39:12:39:19 | password | main.go:39:12:39:19 | password | main.go:39:12:39:19 | password | $@ flows to a logging call. | main.go:39:12:39:19 | password | Sensitive data returned by an access to password |
| main.go:40:10:40:17 | password | main.go:40:10:40:17 | password | main.go:40:10:40:17 | password | $@ flows to a logging call. | main.go:40:10:40:17 | password | Sensitive data returned by an access to password |
| main.go:41:17:41:24 | password | main.go:41:17:41:24 | password | main.go:41:17:41:24 | password | $@ flows to a logging call. | main.go:41:17:41:24 | password | Sensitive data returned by an access to password |
| main.go:42:11:42:18 | password | main.go:42:11:42:18 | password | main.go:42:11:42:18 | password | $@ flows to a logging call. | main.go:42:11:42:18 | password | Sensitive data returned by an access to password |
| main.go:43:12:43:19 | password | main.go:43:12:43:19 | password | main.go:43:12:43:19 | password | $@ flows to a logging call. | main.go:43:12:43:19 | password | Sensitive data returned by an access to password |
| main.go:44:14:44:21 | password | main.go:44:14:44:21 | password | main.go:44:14:44:21 | password | $@ flows to a logging call. | main.go:44:14:44:21 | password | Sensitive data returned by an access to password |
| main.go:47:12:47:19 | password | main.go:47:12:47:19 | password | main.go:47:12:47:19 | password | $@ flows to a logging call. | main.go:47:12:47:19 | password | Sensitive data returned by an access to password |
| main.go:48:17:48:24 | password | main.go:48:17:48:24 | password | main.go:48:17:48:24 | password | $@ flows to a logging call. | main.go:48:17:48:24 | password | Sensitive data returned by an access to password |
| main.go:55:35:55:42 | password | main.go:55:35:55:42 | password | main.go:55:35:55:42 | password | $@ flows to a logging call. | main.go:55:35:55:42 | password | Sensitive data returned by an access to password |
| overrides.go:13:14:13:23 | call to String | overrides.go:9:9:9:16 | password | overrides.go:13:14:13:23 | call to String | $@ flows to a logging call. | overrides.go:9:9:9:16 | password | Sensitive data returned by an access to password |
| passwords.go:9:14:9:14 | x | passwords.go:30:8:30:15 | password | passwords.go:9:14:9:14 | x | $@ flows to a logging call. | passwords.go:30:8:30:15 | password | Sensitive data returned by an access to password |
| passwords.go:25:14:25:21 | password | passwords.go:25:14:25:21 | password | passwords.go:25:14:25:21 | password | $@ flows to a logging call. | passwords.go:25:14:25:21 | password | Sensitive data returned by an access to password |
@@ -55,11 +55,11 @@
| passwords.go:127:14:127:21 | selection of y | passwords.go:122:13:122:25 | call to getPassword | passwords.go:127:14:127:21 | selection of y | $@ flows to a logging call. | passwords.go:122:13:122:25 | call to getPassword | Sensitive data returned by a call to getPassword |
| protobuf.go:14:14:14:35 | call to GetDescription | protobuf.go:12:22:12:29 | password | protobuf.go:14:14:14:35 | call to GetDescription | $@ flows to a logging call. | protobuf.go:12:22:12:29 | password | Sensitive data returned by an access to password |
edges
| klog.go:20:3:25:3 | range statement[1] | klog.go:21:27:21:33 | headers | provenance | |
| klog.go:20:30:20:37 | selection of Header | klog.go:20:3:25:3 | range statement[1] | provenance | Src:MaD:1 Config |
| klog.go:21:4:24:4 | range statement[1] | klog.go:22:15:22:20 | header | provenance | |
| klog.go:21:27:21:33 | headers | klog.go:21:4:24:4 | range statement[1] | provenance | Config |
| klog.go:28:13:28:20 | selection of Header | klog.go:28:13:28:41 | call to Get | provenance | Src:MaD:1 Config |
| klog.go:21:3:26:3 | range statement[1] | klog.go:22:27:22:33 | headers | provenance | |
| klog.go:21:30:21:37 | selection of Header | klog.go:21:3:26:3 | range statement[1] | provenance | Src:MaD:1 Config |
| klog.go:22:4:25:4 | range statement[1] | klog.go:23:15:23:20 | header | provenance | |
| klog.go:22:27:22:33 | headers | klog.go:22:4:25:4 | range statement[1] | provenance | Config |
| klog.go:29:13:29:20 | selection of Header | klog.go:29:13:29:41 | call to Get | provenance | Src:MaD:1 Config |
| overrides.go:9:9:9:16 | password | overrides.go:13:14:13:23 | call to String | provenance | |
| passwords.go:8:12:8:12 | definition of x | passwords.go:9:14:9:14 | x | provenance | |
| passwords.go:30:8:30:15 | password | passwords.go:8:12:8:12 | definition of x | provenance | |
@@ -101,42 +101,42 @@ edges
models
| 1 | Source: net/http; Request; true; Header; ; ; ; remote; manual |
nodes
| klog.go:20:3:25:3 | range statement[1] | semmle.label | range statement[1] |
| klog.go:20:30:20:37 | selection of Header | semmle.label | selection of Header |
| klog.go:21:4:24:4 | range statement[1] | semmle.label | range statement[1] |
| klog.go:21:27:21:33 | headers | semmle.label | headers |
| klog.go:22:15:22:20 | header | semmle.label | header |
| klog.go:28:13:28:20 | selection of Header | semmle.label | selection of Header |
| klog.go:28:13:28:41 | call to Get | semmle.label | call to Get |
| main.go:15:12:15:19 | password | semmle.label | password |
| main.go:16:17:16:24 | password | semmle.label | password |
| main.go:17:13:17:20 | password | semmle.label | password |
| main.go:18:14:18:21 | password | semmle.label | password |
| main.go:19:12:19:19 | password | semmle.label | password |
| main.go:20:17:20:24 | password | semmle.label | password |
| main.go:21:13:21:20 | password | semmle.label | password |
| main.go:22:14:22:21 | password | semmle.label | password |
| main.go:23:12:23:19 | password | semmle.label | password |
| main.go:24:17:24:24 | password | semmle.label | password |
| main.go:25:13:25:20 | password | semmle.label | password |
| main.go:26:14:26:21 | password | semmle.label | password |
| main.go:27:16:27:23 | password | semmle.label | password |
| main.go:30:10:30:17 | password | semmle.label | password |
| main.go:31:15:31:22 | password | semmle.label | password |
| main.go:32:11:32:18 | password | semmle.label | password |
| main.go:33:12:33:19 | password | semmle.label | password |
| main.go:34:10:34:17 | password | semmle.label | password |
| main.go:35:15:35:22 | password | semmle.label | password |
| main.go:36:11:36:18 | password | semmle.label | password |
| main.go:37:12:37:19 | password | semmle.label | password |
| main.go:38:10:38:17 | password | semmle.label | password |
| main.go:39:15:39:22 | password | semmle.label | password |
| main.go:40:11:40:18 | password | semmle.label | password |
| main.go:41:12:41:19 | password | semmle.label | password |
| main.go:42:14:42:21 | password | semmle.label | password |
| main.go:44:12:44:19 | password | semmle.label | password |
| main.go:45:17:45:24 | password | semmle.label | password |
| main.go:52:35:52:42 | password | semmle.label | password |
| klog.go:21:3:26:3 | range statement[1] | semmle.label | range statement[1] |
| klog.go:21:30:21:37 | selection of Header | semmle.label | selection of Header |
| klog.go:22:4:25:4 | range statement[1] | semmle.label | range statement[1] |
| klog.go:22:27:22:33 | headers | semmle.label | headers |
| klog.go:23:15:23:20 | header | semmle.label | header |
| klog.go:29:13:29:20 | selection of Header | semmle.label | selection of Header |
| klog.go:29:13:29:41 | call to Get | semmle.label | call to Get |
| main.go:16:12:16:19 | password | semmle.label | password |
| main.go:17:19:17:26 | password | semmle.label | password |
| main.go:18:13:18:20 | password | semmle.label | password |
| main.go:19:14:19:21 | password | semmle.label | password |
| main.go:20:12:20:19 | password | semmle.label | password |
| main.go:21:19:21:26 | password | semmle.label | password |
| main.go:22:13:22:20 | password | semmle.label | password |
| main.go:23:14:23:21 | password | semmle.label | password |
| main.go:24:12:24:19 | password | semmle.label | password |
| main.go:25:19:25:26 | password | semmle.label | password |
| main.go:26:13:26:20 | password | semmle.label | password |
| main.go:27:14:27:21 | password | semmle.label | password |
| main.go:28:16:28:23 | password | semmle.label | password |
| main.go:32:10:32:17 | password | semmle.label | password |
| main.go:33:17:33:24 | password | semmle.label | password |
| main.go:34:11:34:18 | password | semmle.label | password |
| main.go:35:12:35:19 | password | semmle.label | password |
| main.go:36:10:36:17 | password | semmle.label | password |
| main.go:37:17:37:24 | password | semmle.label | password |
| main.go:38:11:38:18 | password | semmle.label | password |
| main.go:39:12:39:19 | password | semmle.label | password |
| main.go:40:10:40:17 | password | semmle.label | password |
| main.go:41:17:41:24 | password | semmle.label | password |
| main.go:42:11:42:18 | password | semmle.label | password |
| main.go:43:12:43:19 | password | semmle.label | password |
| main.go:44:14:44:21 | password | semmle.label | password |
| main.go:47:12:47:19 | password | semmle.label | password |
| main.go:48:17:48:24 | password | semmle.label | password |
| main.go:55:35:55:42 | password | semmle.label | password |
| overrides.go:9:9:9:16 | password | semmle.label | password |
| overrides.go:13:14:13:23 | call to String | semmle.label | call to String |
| passwords.go:8:12:8:12 | definition of x | semmle.label | definition of x |

View File

@@ -1,2 +1,4 @@
query: Security/CWE-312/CleartextLogging.ql
postprocess: utils/test/PrettyPrintModels.ql
postprocess:
- utils/test/PrettyPrintModels.ql
- utils/test/InlineExpectationsTestQuery.ql

View File

@@ -3,9 +3,10 @@ package main
//go:generate depstubber -vendor k8s.io/klog "" Info
import (
"k8s.io/klog"
"net/http"
"strings"
"k8s.io/klog"
)
func mask(key, value string) string {
@@ -17,15 +18,15 @@ func mask(key, value string) string {
func klogTest() {
http.HandleFunc("/klog", func(w http.ResponseWriter, r *http.Request) {
for name, headers := range r.Header {
for name, headers := range r.Header { // $ Source
for _, header := range headers {
klog.Info(header) // NOT OK
klog.Info(header) // $ Alert
klog.Info(mask(name, header)) // OK
}
}
klog.Info(r.Header.Get("Accept")) // OK
klog.Info(r.Header["Content-Type"]) // OK
klog.Info(r.Header.Get("Authorization")) // NOT OK
klog.Info(r.Header.Get("Authorization")) // $ Alert
})
http.ListenAndServe(":80", nil)
}

View File

@@ -4,51 +4,54 @@ package main
//go:generate depstubber -vendor github.com/golang/glog "" Info
import (
"log"
"github.com/golang/glog"
"github.com/sirupsen/logrus"
"log"
)
func main() {
password := "P4ssw0rd"
log.Print(password)
log.Printf("", password)
log.Printf(password, "")
log.Println(password)
log.Fatal(password)
log.Fatalf("", password)
log.Fatalf(password, "")
log.Fatalln(password)
log.Panic(password)
log.Panicf("", password)
log.Panicf(password, "")
log.Panicln(password)
log.Output(0, password)
log.Print(password) // $ Alert
log.Printf("%s", password) // $ Alert
log.Printf(password, "") // $ Alert
log.Println(password) // $ Alert
log.Fatal(password) // $ Alert
log.Fatalf("%s", password) // $ Alert
log.Fatalf(password, "") // $ Alert
log.Fatalln(password) // $ Alert
log.Panic(password) // $ Alert
log.Panicf("%s", password) // $ Alert
log.Panicf(password, "") // $ Alert
log.Panicln(password) // $ Alert
log.Output(0, password) // $ Alert
log.Printf("%T", password)
l := log.Default()
l.Print(password)
l.Printf("", password)
l.Printf(password, "")
l.Println(password)
l.Fatal(password)
l.Fatalf("", password)
l.Fatalf(password, "")
l.Fatalln(password)
l.Panic(password)
l.Panicf("", password)
l.Panicf(password, "")
l.Panicln(password)
l.Output(0, password)
l.Print(password) // $ Alert
l.Printf("%s", password) // $ Alert
l.Printf(password, "") // $ Alert
l.Println(password) // $ Alert
l.Fatal(password) // $ Alert
l.Fatalf("%s", password) // $ Alert
l.Fatalf(password, "") // $ Alert
l.Fatalln(password) // $ Alert
l.Panic(password) // $ Alert
l.Panicf("%s", password) // $ Alert
l.Panicf(password, "") // $ Alert
l.Panicln(password) // $ Alert
l.Output(0, password) // $ Alert
l.Printf("%T", password)
glog.Info(password)
logrus.Warning(password)
glog.Info(password) // $ Alert
logrus.Warning(password) // $ Alert
fields := make(logrus.Fields)
fields["pass"] = password
entry := logrus.WithFields(fields)
entry.Errorf("")
entry = logrus.WithField("pass", password)
entry = logrus.WithField("pass", password) // $ Alert
entry.Panic("")
}

View File

@@ -6,10 +6,10 @@ type s struct{}
func (_ s) String() string {
password := "horsebatterystaplecorrect"
return password
return password // $ Source
}
func overrideTest(x s, y fmt.Stringer) {
fmt.Println(x.String()) // NOT OK
fmt.Println(x.String()) // $ Alert
fmt.Println(y.String()) // OK
}

View File

@@ -6,7 +6,7 @@ import (
)
func myLog(x string) {
log.Println(x) // NOT OK
log.Println(x) // $ Alert
}
func redact(kind, value string) string {
@@ -22,33 +22,33 @@ func test() {
x := "horsebatterystapleincorrect"
var o passStruct
log.Println(password) // NOT OK
log.Println(o.password) // NOT OK
log.Println(getPassword()) // NOT OK
log.Println(o.getPassword()) // NOT OK
log.Println(password) // $ Alert
log.Println(o.password) // $ Alert
log.Println(getPassword()) // $ Alert
log.Println(o.getPassword()) // $ Alert
myLog(password)
myLog(password) // $ Source
log.Panic(password) // NOT OK
log.Panic(password) // $ Alert
log.Println(name + ", " + password) // NOT OK
log.Println(name + ", " + password) // $ Alert
obj1 := passStruct{
password: x,
password: x, // $ Source
}
log.Println(obj1) // NOT OK
log.Println(obj1) // $ Alert
obj2 := xStruct{
x: password,
x: password, // $ Source
}
log.Println(obj2) // NOT OK
log.Println(obj2) // $ Alert
var obj3 xStruct
log.Println(obj3) // caught because of the below line
obj3.x = password // NOT OK
log.Println(obj3) // $ SPURIOUS: Alert // caught because of the below line and def-use flow
obj3.x = password // $ Source
fixed_password := "cowbatterystaplecorrect"
log.Println(fixed_password) // Probably OK, but caught
log.Println(fixed_password) // $ Alert // Probably OK
log.Println(IncorrectPasswordError) // OK
@@ -83,12 +83,12 @@ func test() {
log.Println(password_sha) // OK
utilityObject := passSetStruct{
passwordSet: make(map[string]bool),
passwordSet: make(map[string]bool), // $ Source
}
log.Println(utilityObject) // NOT OK
log.Println(utilityObject) // $ Alert
secret := password
log.Printf("pw: %s", secret) // NOT OK
secret := password // $ Source
log.Printf("pw: %s", secret) // $ Alert
log.Println("Password is: " + redact("password", password))
@@ -98,33 +98,33 @@ func test() {
if t.test(y) {
f()
// ...
log.Println("Password is: " + password) // NOT OK
log.Println("Password is: " + password) // $ Alert
// ...
}
if t.test(y) {
if f() {
log.Println("Password is: " + password) // NOT OK
log.Println("Password is: " + password) // $ Alert
}
}
if os.Getenv("APP_ENV") != "production" {
log.Println("Password is: " + password) // OK, but still flagged
log.Println("Password is: " + password) // $ SPURIOUS: Alert
}
var password1 stringable = stringable{"arstneio"}
log.Println(name + ", " + password1.String()) // NOT OK
log.Println(name + ", " + password1.String()) // $ Alert
config := Config{
password: x,
password: x, // $ Source
hostname: "tarski",
x: password,
y: getPassword(),
x: password, // $ Source
y: getPassword(), // $ Source
}
log.Println(config.hostname) // OK
log.Println(config) // NOT OK
log.Println(config.x) // NOT OK
log.Println(config.y) // NOT OK
log.Println(config) // $ Alert
log.Println(config.x) // $ Alert
log.Println(config.y) // $ Alert
obj4 := xStruct{
x: "aaaaa",

View File

@@ -9,8 +9,8 @@ func testProtobuf() {
password := "P@ssw0rd"
query := &query.Query{}
query.Description = password
query.Description = password // $ Source
log.Println(query.GetDescription()) // NOT OK
log.Println(query.GetDescription()) // $ Alert
log.Println(query.GetId()) // OK
}

View File

@@ -4,7 +4,8 @@ import ssl
httpd = HTTPServer(('localhost', 4443), SimpleHTTPRequestHandler)
sslctx = ssl.SSLContext()
sslctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
sslctx.options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
sslctx.load_cert_chain(certfile="../cert.pem", keyfile="../key.pem")
httpd.socket = sslctx.wrap_socket (httpd.socket, server_side=True)

View File

@@ -66,6 +66,19 @@ class TestClass extends Class {
}
}
/**
 * A class that is likely a test class: either a definite test class, or
* a class whose name, package, or location suggests that it might be a test class.
*/
class LikelyTestClass extends Class {
LikelyTestClass() {
this instanceof TestClass or
this.getName().toLowerCase().matches("%test%") or
this.getPackage().getName().toLowerCase().matches("%test%") or
this.getLocation().getFile().getAbsolutePath().matches("%/src/test/java%")
}
}
/**
* A test method declared within a JUnit 3.8 test class.
*/
@@ -185,6 +198,37 @@ class TestMethod extends Method {
}
}
/**
* A method that is likely a test method.
*/
class LikelyTestMethod extends Method {
LikelyTestMethod() {
this.getDeclaringType() instanceof LikelyTestClass
or
this instanceof TestMethod
or
this instanceof LikelyJunitTest
}
}
/**
 * A `Method` that is public, has no parameters,
 * has a "void" return type, AND either has a name that starts with "test" or "JUnit", OR
 * has an annotation whose type name ends with "Test".
*/
class LikelyJunitTest extends Method {
LikelyJunitTest() {
this.isPublic() and
this.getReturnType().hasName("void") and
this.hasNoParameters() and
(
this.getName().matches("JUnit%") or
this.getName().matches("test%") or
this.getAnAnnotation().getType().getName().matches("%Test")
)
}
}
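As a hedged illustration (the class, method names, and JUnit annotation below are hypothetical, assume JUnit on the classpath, and are not part of this change), methods like the following would satisfy the `LikelyJunitTest` criteria:
```java
public class AccountTests {
    // Matches: public, no parameters, void return type, name starts with "test".
    public void testWithdrawal() {
        // ...
    }

    // Matches: public, no parameters, void return type, annotation type name ends with "Test".
    @org.junit.Test
    public void withdrawalIsRejectedWhenOverdrawn() {
        // ...
    }
}
```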
/**
* A TestNG annotation used to mark a method that runs "before".
*/

View File

@@ -0,0 +1,40 @@
## Overview
An empty method may indicate that an implementation was intended to be provided but was accidentally omitted. When the method is called, it is not apparent that it provides no implementation, and with dynamic dispatch, a call that resolves to a blank method may result in unexpected program behavior.
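For example (a minimal, hypothetical sketch; the `Auditor`, `NoopAuditor`, and `Service` names are illustrative and not taken from this repository), an accidentally empty override that is reached through dynamic dispatch silently drops the expected behavior:
```java
interface Auditor {
    void record(String event);
}

class ConsoleAuditor implements Auditor {
    @Override
    public void record(String event) {
        System.out.println("AUDIT: " + event);
    }
}

class NoopAuditor implements Auditor {
    @Override
    public void record(String event) {} // accidentally left empty: calls dispatched here do nothing
}

class Service {
    private final Auditor auditor;

    Service(Auditor auditor) {
        this.auditor = auditor;
    }

    void doWork() {
        // Nothing at this call site reveals that a NoopAuditor records nothing.
        auditor.record("work done");
    }
}
```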
## Recommendation
If a method is intentionally left empty, indicate this in one of the following ways:
1. Mark it `abstract` in an abstract class.
2. Declare it in an interface (where it is implicitly abstract).
3. Add a comment to the method explaining that the implementation was intentionally omitted.
4. Throw an `UnsupportedOperationException` from the method body (as in `java.util.Collection.add`).
## Example
```java
public class Test {
public void f1() { // COMPLIANT
// intentionally empty
}
public void f2() {} // NON_COMPLIANT
public void f3(){ throw new UnsupportedOperationException(); } // COMPLIANT
public abstract class TestInner {
public abstract void f(); // COMPLIANT - intentionally empty
}
}
```
## Implementation Notes
The rule does not report methods that are annotated.
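For instance (an illustrative sketch using the AspectJ `@Pointcut` stub exercised by this query's tests; the class and method names are hypothetical), an empty method that carries an annotation is treated as compliant:
```java
import org.aspectj.lang.annotation.Pointcut;

public class Pointcuts {
    // Not reported: the annotation signals that the empty body is intentional.
    @Pointcut()
    public void anyServiceCall() {}
}
```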
## References
- Java SE Documentation: [java.util.Collection.add](https://docs.oracle.com/en/java/javase/20/docs/api/java.base/java/util/Collection.html#add(E)).
- Wikipedia: [Template method pattern](https://en.wikipedia.org/wiki/Template_method_pattern).
- Common Weakness Enumeration: [CWE-1071](https://cwe.mitre.org/data/definitions/1071.html).

View File

@@ -0,0 +1,41 @@
/**
* @id java/empty-method
* @name Empty method
* @description An empty method serves no purpose and makes code less readable. An empty method may
* indicate an error on the part of the developer.
* @kind problem
* @precision medium
* @problem.severity recommendation
* @tags correctness
* maintainability
* readability
* quality
* external/cwe/cwe-1071
*/
import java
/**
 * A `Method` from source that is not abstract and is likely not a test method.
*/
class NonAbstractSource extends Method {
NonAbstractSource() {
this.fromSource() and
not this.isAbstract() and
not this instanceof LikelyTestMethod
}
}
from NonAbstractSource m
where
//empty
not exists(m.getBody().getAChild()) and
//permit comment lines explaining why this is empty
m.getNumberOfCommentLines() = 0 and
  // a Javadoc comment above the method also counts as sufficient reason to leave it empty
not exists(m.getDoc().getJavadoc()) and
//annotated methods are considered compliant
not exists(m.getAnAnnotation()) and
//native methods have no body
not m.isNative()
select m, "Empty method found."

View File

@@ -23,5 +23,5 @@ where
sink.getNode().asExpr() = c.getAlgoSpec() and
InsecureCryptoFlow::flowPath(source, sink)
select c, source, sink,
"Cryptographic algorithm $@ may not be secure, consider using a different algorithm.", source,
"Cryptographic algorithm $@ may not be secure. Consider using a different algorithm.", source,
source.getNode().asExpr().(InsecureAlgorithm).getStringValue()

View File

@@ -0,0 +1,4 @@
---
category: newQuery
---
* Added a new quality query, `java/empty-method`, to detect empty methods.

View File

@@ -10,9 +10,4 @@ import java
* c) in a test class whose name has the word `test`
* d) in a test class implementing a test framework such as JUnit or TestNG
*/
predicate isTestMethod(Method m) {
m.getDeclaringType().getName().toLowerCase().matches("%test%") or // Simple check to exclude test classes to reduce FPs
m.getDeclaringType().getPackage().getName().toLowerCase().matches("%test%") or // Simple check to exclude classes in test packages to reduce FPs
exists(m.getLocation().getFile().getAbsolutePath().indexOf("/src/test/java")) or // Match test directory structure of build tools like maven
m instanceof TestMethod // Test method of a test case implementing a test framework such as JUnit or TestNG
}
predicate isTestMethod(LikelyTestMethod m) { any() }

View File

@@ -0,0 +1,54 @@
import org.aspectj.lang.annotation.Pointcut;
public class Class1 {
// COMPLIANT
public void f() {
int i = 0;
}
// COMPLIANT
public void f1() {
// intentionally empty
}
// NON_COMPLIANT
public void f2() { } // $ Alert
// COMPLIANT - exception
@Pointcut()
public void f4() {
}
/**
* COMPLIANT - empty method with javadoc
*/
public void f5() {
}
public abstract class TestInner {
public abstract void f(); // COMPLIANT - intentionally empty
}
public class Derived extends TestInner {
// COMPLIANT: with annotation
@Override
public void f() {
}
// COMPLIANT: native
public native int nativeMethod();
}
public interface TestInterface {
// NON_COMPLIANT
default void method() { } // $ Alert
void method2(); // COMPLIANT
}
}

View File

@@ -0,0 +1,2 @@
| Class1.java:16:15:16:16 | f2 | Empty method found. |
| Class1.java:49:18:49:23 | method | Empty method found. |

View File

@@ -0,0 +1,2 @@
query: Language Abuse/EmptyMethod.ql
postprocess: utils/test/InlineExpectationsTestQuery.ql

View File

@@ -0,0 +1,5 @@
public class Test {
// COMPLIANT: allow empty method in test class
public void f() {
}
}

View File

@@ -0,0 +1 @@
//semmle-extractor-options: --javac-args -cp ${testdir}/../../stubs/aspectj

View File

@@ -6,7 +6,7 @@ nodes
| WeakHashing.java:21:56:21:91 | getProperty(...) | semmle.label | getProperty(...) |
subpaths
#select
| Test.java:34:21:34:53 | new SecretKeySpec(...) | Test.java:34:48:34:52 | "foo" | Test.java:34:48:34:52 | "foo" | Cryptographic algorithm $@ may not be secure, consider using a different algorithm. | Test.java:34:48:34:52 | "foo" | foo |
| WeakHashing.java:15:29:15:84 | getInstance(...) | WeakHashing.java:15:55:15:83 | getProperty(...) | WeakHashing.java:15:55:15:83 | getProperty(...) | Cryptographic algorithm $@ may not be secure, consider using a different algorithm. | WeakHashing.java:15:55:15:83 | getProperty(...) | MD5 |
| WeakHashing.java:18:30:18:96 | getInstance(...) | WeakHashing.java:18:56:18:95 | getProperty(...) | WeakHashing.java:18:56:18:95 | getProperty(...) | Cryptographic algorithm $@ may not be secure, consider using a different algorithm. | WeakHashing.java:18:56:18:95 | getProperty(...) | MD5 |
| WeakHashing.java:21:30:21:92 | getInstance(...) | WeakHashing.java:21:56:21:91 | getProperty(...) | WeakHashing.java:21:56:21:91 | getProperty(...) | Cryptographic algorithm $@ may not be secure, consider using a different algorithm. | WeakHashing.java:21:56:21:91 | getProperty(...) | MD5 |
| Test.java:34:21:34:53 | new SecretKeySpec(...) | Test.java:34:48:34:52 | "foo" | Test.java:34:48:34:52 | "foo" | Cryptographic algorithm $@ may not be secure. Consider using a different algorithm. | Test.java:34:48:34:52 | "foo" | foo |
| WeakHashing.java:15:29:15:84 | getInstance(...) | WeakHashing.java:15:55:15:83 | getProperty(...) | WeakHashing.java:15:55:15:83 | getProperty(...) | Cryptographic algorithm $@ may not be secure. Consider using a different algorithm. | WeakHashing.java:15:55:15:83 | getProperty(...) | MD5 |
| WeakHashing.java:18:30:18:96 | getInstance(...) | WeakHashing.java:18:56:18:95 | getProperty(...) | WeakHashing.java:18:56:18:95 | getProperty(...) | Cryptographic algorithm $@ may not be secure. Consider using a different algorithm. | WeakHashing.java:18:56:18:95 | getProperty(...) | MD5 |
| WeakHashing.java:21:30:21:92 | getInstance(...) | WeakHashing.java:21:56:21:91 | getProperty(...) | WeakHashing.java:21:56:21:91 | getProperty(...) | Cryptographic algorithm $@ may not be secure. Consider using a different algorithm. | WeakHashing.java:21:56:21:91 | getProperty(...) | MD5 |

View File

@@ -0,0 +1,279 @@
Per: https://www.eclipse.org/org/documents/epl-2.0/EPL-2.0.txt
Eclipse Public License - v 2.0
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE
PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION
OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
1. DEFINITIONS
"Contribution" means:
a) in the case of the initial Contributor, the initial content
Distributed under this Agreement, and
b) in the case of each subsequent Contributor:
i) changes to the Program, and
ii) additions to the Program;
where such changes and/or additions to the Program originate from
and are Distributed by that particular Contributor. A Contribution
"originates" from a Contributor if it was added to the Program by
such Contributor itself or anyone acting on such Contributor's behalf.
Contributions do not include changes or additions to the Program that
are not Modified Works.
"Contributor" means any person or entity that Distributes the Program.
"Licensed Patents" mean patent claims licensable by a Contributor which
are necessarily infringed by the use or sale of its Contribution alone
or when combined with the Program.
"Program" means the Contributions Distributed in accordance with this
Agreement.
"Recipient" means anyone who receives the Program under this Agreement
or any Secondary License (as applicable), including Contributors.
"Derivative Works" shall mean any work, whether in Source Code or other
form, that is based on (or derived from) the Program and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship.
"Modified Works" shall mean any work in Source Code or other form that
results from an addition to, deletion from, or modification of the
contents of the Program, including, for purposes of clarity any new file
in Source Code form that contains any contents of the Program. Modified
Works shall not include works that contain only declarations,
interfaces, types, classes, structures, or files of the Program solely
in each case in order to link to, bind by name, or subclass the Program
or Modified Works thereof.
"Distribute" means the acts of a) distributing or b) making available
in any manner that enables the transfer of a copy.
"Source Code" means the form of a Program preferred for making
modifications, including but not limited to software source code,
documentation source, and configuration files.
"Secondary License" means either the GNU General Public License,
Version 2.0, or any later versions of that license, including any
exceptions or additional permissions as identified by the initial
Contributor.
2. GRANT OF RIGHTS
a) Subject to the terms of this Agreement, each Contributor hereby
grants Recipient a non-exclusive, worldwide, royalty-free copyright
license to reproduce, prepare Derivative Works of, publicly display,
publicly perform, Distribute and sublicense the Contribution of such
Contributor, if any, and such Derivative Works.
b) Subject to the terms of this Agreement, each Contributor hereby
grants Recipient a non-exclusive, worldwide, royalty-free patent
license under Licensed Patents to make, use, sell, offer to sell,
import and otherwise transfer the Contribution of such Contributor,
if any, in Source Code or other form. This patent license shall
apply to the combination of the Contribution and the Program if, at
the time the Contribution is added by the Contributor, such addition
of the Contribution causes such combination to be covered by the
Licensed Patents. The patent license shall not apply to any other
combinations which include the Contribution. No hardware per se is
licensed hereunder.
c) Recipient understands that although each Contributor grants the
licenses to its Contributions set forth herein, no assurances are
provided by any Contributor that the Program does not infringe the
patent or other intellectual property rights of any other entity.
Each Contributor disclaims any liability to Recipient for claims
brought by any other entity based on infringement of intellectual
property rights or otherwise. As a condition to exercising the
rights and licenses granted hereunder, each Recipient hereby
assumes sole responsibility to secure any other intellectual
property rights needed, if any. For example, if a third party
patent license is required to allow Recipient to Distribute the
Program, it is Recipient's responsibility to acquire that license
before distributing the Program.
d) Each Contributor represents that to its knowledge it has
sufficient copyright rights in its Contribution, if any, to grant
the copyright license set forth in this Agreement.
e) Notwithstanding the terms of any Secondary License, no
Contributor makes additional grants to any Recipient (other than
those set forth in this Agreement) as a result of such Recipient's
receipt of the Program under the terms of a Secondary License
(if permitted under the terms of Section 3).
3. REQUIREMENTS
3.1 If a Contributor Distributes the Program in any form, then:
a) the Program must also be made available as Source Code, in
accordance with section 3.2, and the Contributor must accompany
the Program with a statement that the Source Code for the Program
is available under this Agreement, and informs Recipients how to
obtain it in a reasonable manner on or through a medium customarily
used for software exchange; and
b) the Contributor may Distribute the Program under a license
different than this Agreement, provided that such license:
i) effectively disclaims on behalf of all other Contributors all
warranties and conditions, express and implied, including
warranties or conditions of title and non-infringement, and
implied warranties or conditions of merchantability and fitness
for a particular purpose;
ii) effectively excludes on behalf of all other Contributors all
liability for damages, including direct, indirect, special,
incidental and consequential damages, such as lost profits;
iii) does not attempt to limit or alter the recipients' rights
in the Source Code under section 3.2; and
iv) requires any subsequent distribution of the Program by any
party to be under a license that satisfies the requirements
of this section 3.
3.2 When the Program is Distributed as Source Code:
a) it must be made available under this Agreement, or if the
Program (i) is combined with other material in a separate file or
files made available under a Secondary License, and (ii) the initial
Contributor attached to the Source Code the notice described in
Exhibit A of this Agreement, then the Program may be made available
under the terms of such Secondary Licenses, and
b) a copy of this Agreement must be included with each copy of
the Program.
3.3 Contributors may not remove or alter any copyright, patent,
trademark, attribution notices, disclaimers of warranty, or limitations
of liability ("notices") contained within the Program from any copy of
the Program which they Distribute, provided that Contributors may add
their own appropriate notices.
4. COMMERCIAL DISTRIBUTION
Commercial distributors of software may accept certain responsibilities
with respect to end users, business partners and the like. While this
license is intended to facilitate the commercial use of the Program,
the Contributor who includes the Program in a commercial product
offering should do so in a manner which does not create potential
liability for other Contributors. Therefore, if a Contributor includes
the Program in a commercial product offering, such Contributor
("Commercial Contributor") hereby agrees to defend and indemnify every
other Contributor ("Indemnified Contributor") against any losses,
damages and costs (collectively "Losses") arising from claims, lawsuits
and other legal actions brought by a third party against the Indemnified
Contributor to the extent caused by the acts or omissions of such
Commercial Contributor in connection with its distribution of the Program
in a commercial product offering. The obligations in this section do not
apply to any claims or Losses relating to any actual or alleged
intellectual property infringement. In order to qualify, an Indemnified
Contributor must: a) promptly notify the Commercial Contributor in
writing of such claim, and b) allow the Commercial Contributor to control,
and cooperate with the Commercial Contributor in, the defense and any
related settlement negotiations. The Indemnified Contributor may
participate in any such claim at its own expense.
For example, a Contributor might include the Program in a commercial
product offering, Product X. That Contributor is then a Commercial
Contributor. If that Commercial Contributor then makes performance
claims, or offers warranties related to Product X, those performance
claims and warranties are such Commercial Contributor's responsibility
alone. Under this section, the Commercial Contributor would have to
defend claims against the other Contributors related to those performance
claims and warranties, and if a court requires any other Contributor to
pay any damages as a result, the Commercial Contributor must pay
those damages.
5. NO WARRANTY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT
PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR
IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF
TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR
PURPOSE. Each Recipient is solely responsible for determining the
appropriateness of using and distributing the Program and assumes all
risks associated with its exercise of rights under this Agreement,
including but not limited to the risks and costs of program errors,
compliance with applicable laws, damage to or loss of data, programs
or equipment, and unavailability or interruption of operations.
6. DISCLAIMER OF LIABILITY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT
PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS
SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST
PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE
EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
7. GENERAL
If any provision of this Agreement is invalid or unenforceable under
applicable law, it shall not affect the validity or enforceability of
the remainder of the terms of this Agreement, and without further
action by the parties hereto, such provision shall be reformed to the
minimum extent necessary to make such provision valid and enforceable.
If Recipient institutes patent litigation against any entity
(including a cross-claim or counterclaim in a lawsuit) alleging that the
Program itself (excluding combinations of the Program with other software
or hardware) infringes such Recipient's patent(s), then such Recipient's
rights granted under Section 2(b) shall terminate as of the date such
litigation is filed.
All Recipient's rights under this Agreement shall terminate if it
fails to comply with any of the material terms or conditions of this
Agreement and does not cure such failure in a reasonable period of
time after becoming aware of such noncompliance. If all Recipient's
rights under this Agreement terminate, Recipient agrees to cease use
and distribution of the Program as soon as reasonably practicable.
However, Recipient's obligations under this Agreement and any licenses
granted by Recipient relating to the Program shall continue and survive.
Everyone is permitted to copy and distribute copies of this Agreement,
but in order to avoid inconsistency the Agreement is copyrighted and
may only be modified in the following manner. The Agreement Steward
reserves the right to publish new versions (including revisions) of
this Agreement from time to time. No one other than the Agreement
Steward has the right to modify this Agreement. The Eclipse Foundation
is the initial Agreement Steward. The Eclipse Foundation may assign the
responsibility to serve as the Agreement Steward to a suitable separate
entity. Each new version of the Agreement will be given a distinguishing
version number. The Program (including Contributions) may always be
Distributed subject to the version of the Agreement under which it was
received. In addition, after a new version of the Agreement is published,
Contributor may elect to Distribute the Program (including its
Contributions) under the new version.
Except as expressly stated in Sections 2(a) and 2(b) above, Recipient
receives no rights or licenses to the intellectual property of any
Contributor under this Agreement, whether expressly, by implication,
estoppel or otherwise. All rights in the Program not expressly granted
under this Agreement are reserved. Nothing in this Agreement is intended
to be enforceable by any entity that is not a Contributor or Recipient.
No third-party beneficiary rights are created under this Agreement.
Exhibit A - Form of Secondary Licenses Notice
"This Source Code may also be made available under the following
Secondary Licenses when the conditions for such availability set forth
in the Eclipse Public License, v. 2.0 are satisfied: {name license(s),
version(s), and exceptions or additional permissions here}."
Simply including a copy of this Agreement, including this Exhibit A
is not sufficient to license the Source Code under Secondary Licenses.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to
look for such a notice.
You may add additional accurate notices of copyright ownership.

View File

@@ -0,0 +1,27 @@
/*******************************************************************************
* Copyright (c) 2005 Contributors.
* All rights reserved.
* This program and the accompanying materials are made available
* under the terms of the Eclipse Public License v 2.0
* which accompanies this distribution and is available at
* https://www.eclipse.org/org/documents/epl-2.0/EPL-2.0.txt
*
* Contributors:
* initial implementation Alexandre Vasseur
*******************************************************************************/
package org.aspectj.lang.annotation;
import java.lang.annotation.Target;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Pointcut declaration
*
* @author Alexandre Vasseur
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Pointcut {}

View File

@@ -0,0 +1,37 @@
// Removes all nodes nested inside a qualified type access,
// and changes qualified type access nodes to "named type" nodes.
//
/*
* jsdoc_type_exprs (unique int id: @jsdoc_type_expr,
* int kind: int ref,
* int parent: @jsdoc_type_expr_parent ref,
* int idx: int ref,
* varchar(900) tostring: string ref);
*/
class JSDocTypeExprParent extends @jsdoc_type_expr_parent {
string toString() { none() }
}
class JSDocTypeExpr extends @jsdoc_type_expr {
string toString() { none() }
JSDocTypeExpr getChild(int n) { jsdoc_type_exprs(result, _, this, n, _) }
int getNewKind() { jsdoc_type_exprs(this, result, _, _, _) }
predicate shouldRemove() { this = any(JSDocQualifiedTypeAccess a).getChild(_) }
}
class JSDocQualifiedTypeAccess extends @jsdoc_qualified_type_expr, JSDocTypeExpr {
override int getNewKind() {
result = 5
/* 5 = @jsdoc_named_type_expr */
}
}
from JSDocTypeExpr node, JSDocTypeExprParent parent, int idx, string tostring
where
jsdoc_type_exprs(node, _, parent, idx, tostring) and
not node.shouldRemove()
select node, node.getNewKind(), parent, idx, tostring

File diff suppressed because it is too large

View File

@@ -0,0 +1,4 @@
description: split up qualified names in jsdoc type exprs
compatibility: backwards
jsdoc_type_exprs.rel: run jsdoc_type_exprs.ql

View File

@@ -2,12 +2,12 @@ package com.semmle.js.ast.jsdoc;
import com.semmle.js.ast.SourceLocation;
/** A named JSDoc type. */
public class NameExpression extends JSDocTypeExpression {
/** An identifier in a JSDoc type. */
public class Identifier extends JSDocTypeExpression {
private final String name;
public NameExpression(SourceLocation loc, String name) {
super(loc, "NameExpression");
public Identifier(SourceLocation loc, String name) {
super(loc, "Identifier");
this.name = name;
}

View File

@@ -0,0 +1,35 @@
package com.semmle.js.ast.jsdoc;
import com.semmle.js.ast.SourceLocation;
/** A qualified name in a JSDoc type. */
public class QualifiedNameExpression extends JSDocTypeExpression {
private final JSDocTypeExpression base;
private final Identifier name;
public QualifiedNameExpression(SourceLocation loc, JSDocTypeExpression base, Identifier name) {
super(loc, "QualifiedNameExpression");
this.base = base;
this.name = name;
}
@Override
public void accept(Visitor v) {
v.visit(this);
}
/** Returns the expression on the left side of the dot character. */
public JSDocTypeExpression getBase() {
return base;
}
/** Returns the identifier on the right-hand side of the dot character. */
public Identifier getNameNode() {
return name;
}
@Override
public String pp() {
return base.pp() + "." + name.pp();
}
}

View File

@@ -10,7 +10,9 @@ public interface Visitor {
public void visit(JSDocTag nd);
public void visit(NameExpression nd);
public void visit(Identifier nd);
public void visit(QualifiedNameExpression nd);
public void visit(NullableLiteral nd);

View File

@@ -9,13 +9,14 @@ import com.semmle.js.ast.jsdoc.JSDocComment;
import com.semmle.js.ast.jsdoc.JSDocElement;
import com.semmle.js.ast.jsdoc.JSDocTag;
import com.semmle.js.ast.jsdoc.JSDocTypeExpression;
import com.semmle.js.ast.jsdoc.NameExpression;
import com.semmle.js.ast.jsdoc.Identifier;
import com.semmle.js.ast.jsdoc.NonNullableType;
import com.semmle.js.ast.jsdoc.NullLiteral;
import com.semmle.js.ast.jsdoc.NullableLiteral;
import com.semmle.js.ast.jsdoc.NullableType;
import com.semmle.js.ast.jsdoc.OptionalType;
import com.semmle.js.ast.jsdoc.ParameterType;
import com.semmle.js.ast.jsdoc.QualifiedNameExpression;
import com.semmle.js.ast.jsdoc.RecordType;
import com.semmle.js.ast.jsdoc.RestType;
import com.semmle.js.ast.jsdoc.TypeApplication;
@@ -42,7 +43,7 @@ public class JSDocExtractor {
jsdocTypeExprKinds.put("UndefinedLiteral", 2);
jsdocTypeExprKinds.put("NullableLiteral", 3);
jsdocTypeExprKinds.put("VoidLiteral", 4);
jsdocTypeExprKinds.put("NameExpression", 5);
jsdocTypeExprKinds.put("Identifier", 5);
jsdocTypeExprKinds.put("TypeApplication", 6);
jsdocTypeExprKinds.put("NullableType", 7);
jsdocTypeExprKinds.put("NonNullableType", 8);
@@ -52,6 +53,7 @@ public class JSDocExtractor {
jsdocTypeExprKinds.put("FunctionType", 12);
jsdocTypeExprKinds.put("OptionalType", 13);
jsdocTypeExprKinds.put("RestType", 14);
jsdocTypeExprKinds.put("QualifiedNameExpression", 15);
}
private final TrapWriter trapwriter;
@@ -122,10 +124,17 @@ public class JSDocExtractor {
}
@Override
public void visit(NameExpression nd) {
public void visit(Identifier nd) {
visit((JSDocTypeExpression) nd);
}
@Override
public void visit(QualifiedNameExpression nd) {
Label label = visit((JSDocTypeExpression) nd);
visit(nd.getBase(), label, 0);
visit(nd.getNameNode(), label, 1);
}
@Override
public void visit(NullableLiteral nd) {
visit((JSDocTypeExpression) nd);

View File

@@ -42,7 +42,7 @@ public class Main {
* A version identifier that should be updated every time the extractor changes in such a way that
* it may produce different tuples for the same file under the same {@link ExtractorConfig}.
*/
public static final String EXTRACTOR_VERSION = "2025-02-03";
public static final String EXTRACTOR_VERSION = "2025-03-20";
public static final Pattern NEWLINE = Pattern.compile("\n");

View File

@@ -10,13 +10,14 @@ import com.semmle.js.ast.jsdoc.FunctionType;
import com.semmle.js.ast.jsdoc.JSDocComment;
import com.semmle.js.ast.jsdoc.JSDocTag;
import com.semmle.js.ast.jsdoc.JSDocTypeExpression;
import com.semmle.js.ast.jsdoc.NameExpression;
import com.semmle.js.ast.jsdoc.Identifier;
import com.semmle.js.ast.jsdoc.NonNullableType;
import com.semmle.js.ast.jsdoc.NullLiteral;
import com.semmle.js.ast.jsdoc.NullableLiteral;
import com.semmle.js.ast.jsdoc.NullableType;
import com.semmle.js.ast.jsdoc.OptionalType;
import com.semmle.js.ast.jsdoc.ParameterType;
import com.semmle.js.ast.jsdoc.QualifiedNameExpression;
import com.semmle.js.ast.jsdoc.RecordType;
import com.semmle.js.ast.jsdoc.RestType;
import com.semmle.js.ast.jsdoc.TypeApplication;
@@ -70,30 +71,6 @@ public class JSDocParser {
return new JSDocComment(comment, r.fst(), tags);
}
/** Specification of Doctrine AST types for JSDoc type expressions. */
private static final Map<Class<? extends JSDocTypeExpression>, List<String>> spec =
new LinkedHashMap<Class<? extends JSDocTypeExpression>, List<String>>();
static {
spec.put(AllLiteral.class, Arrays.<String>asList());
spec.put(ArrayType.class, Arrays.asList("elements"));
spec.put(FieldType.class, Arrays.asList("key", "value"));
spec.put(FunctionType.class, Arrays.asList("this", "new", "params", "result"));
spec.put(NameExpression.class, Arrays.asList("name"));
spec.put(NonNullableType.class, Arrays.asList("expression", "prefix"));
spec.put(NullableLiteral.class, Arrays.<String>asList());
spec.put(NullLiteral.class, Arrays.<String>asList());
spec.put(NullableType.class, Arrays.asList("expression", "prefix"));
spec.put(OptionalType.class, Arrays.asList("expression"));
spec.put(ParameterType.class, Arrays.asList("name", "expression"));
spec.put(RecordType.class, Arrays.asList("fields"));
spec.put(RestType.class, Arrays.asList("expression"));
spec.put(TypeApplication.class, Arrays.asList("expression", "applications"));
spec.put(UndefinedLiteral.class, Arrays.<String>asList());
spec.put(UnionType.class, Arrays.asList("elements"));
spec.put(VoidLiteral.class, Arrays.<String>asList());
}
private static String sliceSource(String source, int index, int last) {
if (index >= source.length()) return "";
if (last > source.length()) last = source.length();
@@ -137,7 +114,7 @@ public class JSDocParser {
}
private static boolean isTypeName(char ch) {
return "><(){}[],:*|?!=".indexOf(ch) == -1 && !isWhiteSpace(ch) && !isLineTerminator(ch);
return "><(){}[],:*|?!=.".indexOf(ch) == -1 && !isWhiteSpace(ch) && !isLineTerminator(ch);
}
private static boolean isParamTitle(String title) {
@@ -559,21 +536,12 @@ public class JSDocParser {
}
private Token scanTypeName() {
char ch, ch2;
value = new String(Character.toChars(advance()));
StringBuilder sb = new StringBuilder();
sb.append((char)advance());
while (index < endIndex && isTypeName(source.charAt(index))) {
ch = source.charAt(index);
if (ch == '.') {
if ((index + 1) < endIndex) {
ch2 = source.charAt(index + 1);
if (ch2 == '<') {
break;
}
}
}
value += new String(Character.toChars(advance()));
sb.append((char)advance());
}
value = sb.toString();
return Token.NAME;
}
@@ -848,11 +816,24 @@ public class JSDocParser {
return finishNode(new RecordType(loc, fields));
}
private JSDocTypeExpression parseNameExpression() throws ParseError {
Object name = value;
private Identifier parseIdentifier() throws ParseError {
SourceLocation loc = loc();
Object value = this.value; // save the value of the current token
expect(Token.NAME);
return finishNode(new NameExpression(loc, name.toString()));
return finishNode(new Identifier(loc, value.toString()));
}
private JSDocTypeExpression parseNameExpression() throws ParseError {
JSDocTypeExpression node = parseIdentifier();
while (token == Token.DOT) {
consume(Token.DOT);
Identifier memberName = parseIdentifier();
// Create a SourceLocation object with the correct start location.
// The call to finishNode() will set the end location.
SourceLocation loc = new SourceLocation(node.getLoc());
node = finishNode(new QualifiedNameExpression(loc, node, memberName));
}
return node;
}
// TypeExpressionList :=
@@ -945,14 +926,14 @@ public class JSDocParser {
SourceLocation loc = loc();
expr = parseTypeExpression();
if (expr instanceof NameExpression && token == Token.COLON) {
if (expr instanceof Identifier && token == Token.COLON) {
// Identifier ':' TypeExpression
consume(Token.COLON);
expr =
finishNode(
new ParameterType(
new SourceLocation(loc),
((NameExpression) expr).getName(),
((Identifier) expr).getName(),
parseTypeExpression()));
}
if (token == Token.EQUAL) {
@@ -1128,7 +1109,7 @@ public class JSDocParser {
consume(Token.RBRACK, "expected an array-style type declaration (' + value + '[])");
List<JSDocTypeExpression> expressions = new ArrayList<>();
expressions.add(expr);
NameExpression nameExpr = finishNode(new NameExpression(new SourceLocation(loc), "Array"));
Identifier nameExpr = finishNode(new Identifier(new SourceLocation(loc), "Array"));
return finishNode(new TypeApplication(loc, nameExpr, expressions));
}
@@ -1549,9 +1530,9 @@ public class JSDocParser {
// fixed at the end
if (isParamTitle(this._title)
&& this._tag.type != null
&& this._tag.type instanceof NameExpression) {
this._extra_name = ((NameExpression) this._tag.type).getName();
this._tag.name = ((NameExpression) this._tag.type).getName();
&& this._tag.type instanceof Identifier) {
this._extra_name = ((Identifier) this._tag.type).getName();
this._tag.name = ((Identifier) this._tag.type).getName();
this._tag.type = null;
} else {
if (!this.addError("Missing or invalid tag name")) {
@@ -1667,7 +1648,7 @@ public class JSDocParser {
Position start = new Position(_tag.startLine, _tag.startColumn, _tag.startColumn);
Position end = new Position(_tag.startLine, _tag.startColumn, _tag.startColumn);
SourceLocation loc = new SourceLocation(_extra_name, start, end);
this._tag.type = new NameExpression(loc, _extra_name);
this._tag.type = new Identifier(loc, _extra_name);
}
this._tag.name = null;

View File

@@ -517,150 +517,170 @@ jsdoc_type_exprs(#20157,4,#20145,-1,"void")
locations_default(#20158,#10000,11,60,11,63)
hasLocation(#20157,#20158)
#20159=*
jsdoc_type_exprs(#20159,5,#20145,-2,"goog.ui.Menu")
jsdoc_type_exprs(#20159,15,#20145,-2,"goog.ui.Menu")
#20160=@"loc,{#10000},11,26,11,37"
locations_default(#20160,#10000,11,26,11,37)
hasLocation(#20159,#20160)
jsdoc_has_new_parameter(#20145)
#20161=*
jsdoc_tags(#20161,"param",#20117,4,"@param")
#20162=@"loc,{#10000},12,5,12,10"
locations_default(#20162,#10000,12,5,12,10)
jsdoc_type_exprs(#20161,15,#20159,0,"goog.ui")
#20162=@"loc,{#10000},11,26,11,32"
locations_default(#20162,#10000,11,26,11,32)
hasLocation(#20161,#20162)
jsdoc_tag_names(#20161,"var_args")
#20163=*
jsdoc_type_exprs(#20163,14,#20161,0,"...number")
#20164=@"loc,{#10000},12,13,12,21"
locations_default(#20164,#10000,12,13,12,21)
jsdoc_type_exprs(#20163,5,#20161,0,"goog")
#20164=@"loc,{#10000},11,26,11,29"
locations_default(#20164,#10000,11,26,11,29)
hasLocation(#20163,#20164)
#20165=*
jsdoc_type_exprs(#20165,5,#20163,0,"number")
#20166=@"loc,{#10000},12,16,12,21"
locations_default(#20166,#10000,12,16,12,21)
jsdoc_type_exprs(#20165,5,#20161,1,"ui")
#20166=@"loc,{#10000},11,31,11,32"
locations_default(#20166,#10000,11,31,11,32)
hasLocation(#20165,#20166)
#20167=*
jsdoc(#20167,"",#20010)
hasLocation(#20167,#20011)
#20168=*
jsdoc_tags(#20168,"param",#20167,0,"@param")
#20169=@"loc,{#10000},15,4,15,9"
locations_default(#20169,#10000,15,4,15,9)
hasLocation(#20168,#20169)
#20170=*
jsdoc_errors(#20170,#20168,"Missing or invalid tag name","Missing ... ag name")
jsdoc_type_exprs(#20167,5,#20159,1,"Menu")
#20168=@"loc,{#10000},11,34,11,37"
locations_default(#20168,#10000,11,34,11,37)
hasLocation(#20167,#20168)
jsdoc_has_new_parameter(#20145)
#20169=*
jsdoc_tags(#20169,"param",#20117,4,"@param")
#20170=@"loc,{#10000},12,5,12,10"
locations_default(#20170,#10000,12,5,12,10)
hasLocation(#20169,#20170)
jsdoc_tag_names(#20169,"var_args")
#20171=*
jsdoc_tags(#20171,"param",#20167,1,"@param")
#20172=@"loc,{#10000},16,4,16,9"
locations_default(#20172,#10000,16,4,16,9)
jsdoc_type_exprs(#20171,14,#20169,0,"...number")
#20172=@"loc,{#10000},12,13,12,21"
locations_default(#20172,#10000,12,13,12,21)
hasLocation(#20171,#20172)
jsdoc_tag_names(#20171,"x")
#20173=*
jsdoc(#20173,"",#20012)
hasLocation(#20173,#20013)
#20174=*
jsdoc_tags(#20174,"",#20173,0,"@")
#20175=@"loc,{#10000},20,4,20,4"
locations_default(#20175,#10000,20,4,20,4)
hasLocation(#20174,#20175)
jsdoc_tag_descriptions(#20174,"{link a}")
jsdoc_type_exprs(#20173,5,#20171,0,"number")
#20174=@"loc,{#10000},12,16,12,21"
locations_default(#20174,#10000,12,16,12,21)
hasLocation(#20173,#20174)
#20175=*
jsdoc(#20175,"",#20010)
hasLocation(#20175,#20011)
#20176=*
jsdoc_errors(#20176,#20174,"Missing or invalid title","Missing ... d title")
#20177=*
jsdoc(#20177,"",#20014)
hasLocation(#20177,#20015)
jsdoc_tags(#20176,"param",#20175,0,"@param")
#20177=@"loc,{#10000},15,4,15,9"
locations_default(#20177,#10000,15,4,15,9)
hasLocation(#20176,#20177)
#20178=*
jsdoc_tags(#20178,"typedef",#20177,0,"@typedef")
#20179=@"loc,{#10000},24,4,24,11"
locations_default(#20179,#10000,24,4,24,11)
hasLocation(#20178,#20179)
jsdoc_tag_descriptions(#20178,"{a}")
#20180=*
jsdoc_errors(#20180,#20178,"Missing or invalid tag type","Missing ... ag type")
jsdoc_errors(#20178,#20176,"Missing or invalid tag name","Missing ... ag name")
#20179=*
jsdoc_tags(#20179,"param",#20175,1,"@param")
#20180=@"loc,{#10000},16,4,16,9"
locations_default(#20180,#10000,16,4,16,9)
hasLocation(#20179,#20180)
jsdoc_tag_names(#20179,"x")
#20181=*
jsdoc(#20181,"[resize description]",#20016)
hasLocation(#20181,#20017)
jsdoc(#20181,"",#20012)
hasLocation(#20181,#20013)
#20182=*
jsdoc_tags(#20182,"param",#20181,0,"@param")
#20183=@"loc,{#10000},30,4,30,9"
locations_default(#20183,#10000,30,4,30,9)
jsdoc_tags(#20182,"",#20181,0,"@")
#20183=@"loc,{#10000},20,4,20,4"
locations_default(#20183,#10000,20,4,20,4)
hasLocation(#20182,#20183)
jsdoc_tag_descriptions(#20182,"[description]
")
jsdoc_tag_names(#20182,"w")
jsdoc_tag_descriptions(#20182,"{link a}")
#20184=*
jsdoc_type_exprs(#20184,10,#20182,0,"[type]")
#20185=@"loc,{#10000},30,13,30,18"
locations_default(#20185,#10000,30,13,30,18)
hasLocation(#20184,#20185)
jsdoc_errors(#20184,#20182,"Missing or invalid title","Missing ... d title")
#20185=*
jsdoc(#20185,"",#20014)
hasLocation(#20185,#20015)
#20186=*
jsdoc_type_exprs(#20186,5,#20184,0,"type")
#20187=@"loc,{#10000},30,14,30,17"
locations_default(#20187,#10000,30,14,30,17)
jsdoc_tags(#20186,"typedef",#20185,0,"@typedef")
#20187=@"loc,{#10000},24,4,24,11"
locations_default(#20187,#10000,24,4,24,11)
hasLocation(#20186,#20187)
jsdoc_tag_descriptions(#20186,"{a}")
#20188=*
jsdoc_tags(#20188,"param",#20181,1,"@param")
#20189=@"loc,{#10000},31,4,31,9"
locations_default(#20189,#10000,31,4,31,9)
hasLocation(#20188,#20189)
jsdoc_tag_descriptions(#20188,"[description]
")
jsdoc_errors(#20188,#20186,"Missing or invalid tag type","Missing ... ag type")
#20189=*
jsdoc(#20189,"[resize description]",#20016)
hasLocation(#20189,#20017)
#20190=*
jsdoc_tags(#20190,"return",#20181,2,"@return")
#20191=@"loc,{#10000},32,4,32,10"
locations_default(#20191,#10000,32,4,32,10)
jsdoc_tags(#20190,"param",#20189,0,"@param")
#20191=@"loc,{#10000},30,4,30,9"
locations_default(#20191,#10000,30,4,30,9)
hasLocation(#20190,#20191)
jsdoc_tag_descriptions(#20190,"[description]")
jsdoc_tag_descriptions(#20190,"[description]
")
jsdoc_tag_names(#20190,"w")
#20192=*
jsdoc_type_exprs(#20192,10,#20190,0,"[type]")
#20193=@"loc,{#10000},32,13,32,18"
locations_default(#20193,#10000,32,13,32,18)
#20193=@"loc,{#10000},30,13,30,18"
locations_default(#20193,#10000,30,13,30,18)
hasLocation(#20192,#20193)
#20194=*
jsdoc_type_exprs(#20194,5,#20192,0,"type")
#20195=@"loc,{#10000},32,14,32,17"
locations_default(#20195,#10000,32,14,32,17)
#20195=@"loc,{#10000},30,14,30,17"
locations_default(#20195,#10000,30,14,30,17)
hasLocation(#20194,#20195)
#20196=*
jsdoc(#20196,"",#20018)
hasLocation(#20196,#20019)
#20197=*
jsdoc_tags(#20197,"exports",#20196,0,"@exports")
#20198=@"loc,{#10000},36,3,36,10"
locations_default(#20198,#10000,36,3,36,10)
hasLocation(#20197,#20198)
jsdoc_tag_descriptions(#20197,"R
jsdoc_tags(#20196,"param",#20189,1,"@param")
#20197=@"loc,{#10000},31,4,31,9"
locations_default(#20197,#10000,31,4,31,9)
hasLocation(#20196,#20197)
jsdoc_tag_descriptions(#20196,"[description]
")
#20199=*
jsdoc(#20199,"",#20020)
hasLocation(#20199,#20021)
#20198=*
jsdoc_tags(#20198,"return",#20189,2,"@return")
#20199=@"loc,{#10000},32,4,32,10"
locations_default(#20199,#10000,32,4,32,10)
hasLocation(#20198,#20199)
jsdoc_tag_descriptions(#20198,"[description]")
#20200=*
jsdoc_tags(#20200,"typedef",#20199,0,"@typedef")
#20201=@"loc,{#10000},41,4,41,11"
locations_default(#20201,#10000,41,4,41,11)
jsdoc_type_exprs(#20200,10,#20198,0,"[type]")
#20201=@"loc,{#10000},32,13,32,18"
locations_default(#20201,#10000,32,13,32,18)
hasLocation(#20200,#20201)
#20202=*
jsdoc_type_exprs(#20202,9,#20200,0,"{0: number}")
#20203=@"loc,{#10000},41,14,41,24"
locations_default(#20203,#10000,41,14,41,24)
jsdoc_type_exprs(#20202,5,#20200,0,"type")
#20203=@"loc,{#10000},32,14,32,17"
locations_default(#20203,#10000,32,14,32,17)
hasLocation(#20202,#20203)
jsdoc_record_field_name(#20202,0,"0")
#20204=*
jsdoc_type_exprs(#20204,5,#20202,0,"number")
#20205=@"loc,{#10000},41,18,41,23"
locations_default(#20205,#10000,41,18,41,23)
hasLocation(#20204,#20205)
toplevels(#20001,0)
#20206=@"loc,{#10000},1,1,43,0"
locations_default(#20206,#10000,1,1,43,0)
hasLocation(#20001,#20206)
jsdoc(#20204,"",#20018)
hasLocation(#20204,#20019)
#20205=*
jsdoc_tags(#20205,"exports",#20204,0,"@exports")
#20206=@"loc,{#10000},36,3,36,10"
locations_default(#20206,#10000,36,3,36,10)
hasLocation(#20205,#20206)
jsdoc_tag_descriptions(#20205,"R
")
#20207=*
entry_cfg_node(#20207,#20001)
#20208=@"loc,{#10000},1,1,1,0"
locations_default(#20208,#10000,1,1,1,0)
hasLocation(#20207,#20208)
#20209=*
exit_cfg_node(#20209,#20001)
hasLocation(#20209,#20105)
successor(#20207,#20209)
jsdoc(#20207,"",#20020)
hasLocation(#20207,#20021)
#20208=*
jsdoc_tags(#20208,"typedef",#20207,0,"@typedef")
#20209=@"loc,{#10000},41,4,41,11"
locations_default(#20209,#10000,41,4,41,11)
hasLocation(#20208,#20209)
#20210=*
jsdoc_type_exprs(#20210,9,#20208,0,"{0: number}")
#20211=@"loc,{#10000},41,14,41,24"
locations_default(#20211,#10000,41,14,41,24)
hasLocation(#20210,#20211)
jsdoc_record_field_name(#20210,0,"0")
#20212=*
jsdoc_type_exprs(#20212,5,#20210,0,"number")
#20213=@"loc,{#10000},41,18,41,23"
locations_default(#20213,#10000,41,18,41,23)
hasLocation(#20212,#20213)
toplevels(#20001,0)
#20214=@"loc,{#10000},1,1,43,0"
locations_default(#20214,#10000,1,1,43,0)
hasLocation(#20001,#20214)
#20215=*
entry_cfg_node(#20215,#20001)
#20216=@"loc,{#10000},1,1,1,0"
locations_default(#20216,#10000,1,1,1,0)
hasLocation(#20215,#20216)
#20217=*
exit_cfg_node(#20217,#20001)
hasLocation(#20217,#20105)
successor(#20215,#20217)
numlines(#10000,42,0,37)
filetype(#10000,"javascript")

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Added support for the `underscore.string` package.
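The change note does not list which functions are modelled, so the snippet below is only a hedged sketch of the kind of flow this support presumably enables: taint preserved through underscore.string helpers such as trim and capitalize on its way to a sink.

const express = require('express');
const s = require('underscore.string');

const app = express();

app.get('/greet', (req, res) => {
  // req.query.name is untrusted user input. Assuming the new model tracks
  // taint through underscore.string's string helpers, the transformed value
  // is still tainted when it reaches the HTML response below.
  const name = s.capitalize(s.trim(req.query.name || ''));
  res.send('<h1>Hello ' + name + '</h1>'); // potential reflected XSS
});

app.listen(3000);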

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Added support for the `ApolloServer` class from `@apollo/server` and similar packages. In particular, the incoming data in a GraphQL resolver is now seen as a source of untrusted user input.
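A minimal sketch of what this means in practice (the schema, resolver, and sink below are illustrative, not taken from the change): values delivered to a resolver through its args parameter come from the client's GraphQL query and are now treated as untrusted.

const { ApolloServer } = require('@apollo/server');
const { startStandaloneServer } = require('@apollo/server/standalone');
const { exec } = require('node:child_process');

const typeDefs = `#graphql
  type Query { lookup(host: String!): String }
`;

const resolvers = {
  Query: {
    // `args.host` originates from the incoming GraphQL request, so it is now
    // seen as a source of untrusted user input.
    lookup: (_parent, args) => {
      exec('ping -c 1 ' + args.host); // tainted data reaches a command line
      return 'ok';
    },
  },
};

const server = new ApolloServer({ typeDefs, resolvers });
startStandaloneServer(server, { listen: { port: 4000 } }).then(({ url }) => {
  console.log(`GraphQL server ready at ${url}`);
});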

Some files were not shown because too many files have changed in this diff