Compare commits

...

13 Commits

Author SHA1 Message Date
Aditya Sharad
93efbfe1c7 WIP: Go: Add context query for retrieving call graph edges 2025-04-17 15:06:16 -07:00
Aditya Sharad
4a1b988f39 WIP: Ruby: Add context query for retrieving XSS sanitisers 2025-04-17 15:05:56 -07:00
Paolo Tranquilli
6176202d50 Merge pull request #19291 from github/redsun82/rust-pick-edition
Rust: pick correct edition for the files
2025-04-14 16:26:00 +02:00
Paolo Tranquilli
c9cff09f5d Merge branch 'main' into redsun82/rust-pick-edition 2025-04-14 15:19:33 +02:00
Michael Nebel
53c4b29b50 Merge pull request #19289 from michaelnebel/csharp/improveautobuilder
C#: Improve auto-builder to better detect SDK references.
2025-04-14 12:43:23 +02:00
Paolo Tranquilli
c245459e97 Merge pull request #19293 from github/redsun82/rust-fix-member-aggregation
Rust: fix workspace member aggregation when absolute path is a glob pattern
2025-04-14 12:08:43 +02:00
Paolo Tranquilli
63e5f5a555 Rust: parametrize some integration tests on three editions 2025-04-11 16:50:23 +02:00
Paolo Tranquilli
868680f078 Merge branch 'redsun82/rust-fix-member-aggregation' into redsun82/rust-pick-edition 2025-04-11 16:46:16 +02:00
Paolo Tranquilli
60aa3a8d9d Rust: fix workspace member aggregation when absolute path is a glob pattern
We were interpreting the absolute path of a workspace as a glob pattern,
which doesn't work if the path has some special characters (e.g. `[` or
`]`).
2025-04-11 16:41:51 +02:00
Paolo Tranquilli
dbbd80f4dc Rust: pick correct edition for the files
Previously we would unconditionally set the edition to the latest stable
according to rust-analyzer (2021 at the moment). Now we ask
rust-analyzer itself to pick the correct edition for the file.
2025-04-11 15:36:45 +02:00
Michael Nebel
f349048e42 C#: Add change note. 2025-04-11 13:53:54 +02:00
Michael Nebel
31143b405e C#: Improve auto builder logic to detect Sdk reference. 2025-04-11 13:53:52 +02:00
Michael Nebel
a5aef8c6f9 C#: Add some more DotNet autobuilder unit tests. 2025-04-11 12:03:06 +02:00
12 changed files with 217 additions and 80 deletions

View File

@@ -424,8 +424,7 @@ namespace Semmle.Autobuild.CSharp.Tests
return new CSharpAutobuilder(actions, options);
}
[Fact]
public void TestDefaultCSharpAutoBuilder()
private void SetupActionForDotnet()
{
actions.RunProcess["cmd.exe /C dotnet --info"] = 0;
actions.RunProcess[@"cmd.exe /C dotnet clean C:\Project\test.csproj"] = 0;
@@ -438,20 +437,80 @@ namespace Semmle.Autobuild.CSharp.Tests
actions.GetEnvironmentVariable["CODEQL_EXTRACTOR_CSHARP_SCRATCH_DIR"] = "scratch";
actions.EnumerateFiles[@"C:\Project"] = "foo.cs\nbar.cs\ntest.csproj";
actions.EnumerateDirectories[@"C:\Project"] = "";
var xml = new XmlDocument();
xml.LoadXml(@"<Project Sdk=""Microsoft.NET.Sdk"">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
}
</Project>");
private void CreateAndVerifyDotnetScript(XmlDocument xml)
{
actions.LoadXml[@"C:\Project\test.csproj"] = xml;
var autobuilder = CreateAutoBuilder(true);
TestAutobuilderScript(autobuilder, 0, 4);
}
[Fact]
public void TestDefaultCSharpAutoBuilder1()
{
SetupActionForDotnet();
var xml = new XmlDocument();
xml.LoadXml(
"""
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp2.1</TargetFramework>
</PropertyGroup>
</Project>
""");
CreateAndVerifyDotnetScript(xml);
}
[Fact]
public void TestDefaultCSharpAutoBuilder2()
{
SetupActionForDotnet();
var xml = new XmlDocument();
xml.LoadXml(
"""
<Project>
<Sdk Name="Microsoft.NET.Sdk" />
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
</Project>
"""
);
CreateAndVerifyDotnetScript(xml);
}
[Fact]
public void TestDefaultCSharpAutoBuilder3()
{
SetupActionForDotnet();
var xml = new XmlDocument();
xml.LoadXml(
"""
<Project>
<Import Project="Sdk.props" Sdk="Microsoft.NET.Sdk" />
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<Import Project="Sdk.targets" Sdk="Microsoft.NET.Sdk" />
</Project>
"""
);
CreateAndVerifyDotnetScript(xml);
}
[Fact]
public void TestLinuxCSharpAutoBuilder()
{

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;
using Semmle.Util.Logging;
namespace Semmle.Autobuild.Shared
{
@@ -26,6 +25,26 @@ namespace Semmle.Autobuild.Shared
private readonly Lazy<List<Project<TAutobuildOptions>>> includedProjectsLazy;
public override IEnumerable<IProjectOrSolution> IncludedProjects => includedProjectsLazy.Value;
private static bool HasSdkAttribute(XmlElement xml) =>
xml.HasAttribute("Sdk");
private static bool AnyElement(XmlNodeList l, Func<XmlElement, bool> f) =>
l.OfType<XmlElement>().Any(f);
/// <summary>
/// According to https://learn.microsoft.com/en-us/visualstudio/msbuild/how-to-use-project-sdk?view=vs-2022#reference-a-project-sdk
/// there are three ways to reference a project SDK:
/// 1. As an attribute on the <Project/>.
/// 2. As a top level element of <Project>.
/// 3. As an attribute on an <Import> element.
///
/// Returns true, if the Sdk attribute is used, otherwise false.
/// </summary>
private static bool ReferencesSdk(XmlElement xml) =>
HasSdkAttribute(xml) || // Case 1
AnyElement(xml.ChildNodes, e => e.Name == "Sdk") || // Case 2
AnyElement(xml.GetElementsByTagName("Import"), HasSdkAttribute); // Case 3
public Project(Autobuilder<TAutobuildOptions> builder, string path) : base(builder, path)
{
ToolsVersion = new Version();
@@ -49,7 +68,7 @@ namespace Semmle.Autobuild.Shared
if (root?.Name == "Project")
{
if (root.HasAttribute("Sdk"))
if (ReferencesSdk(root))
{
DotNetProject = true;
return;

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Improved autobuilder logic for detecting whether a project references an SDK (and should be built using `dotnet`).

15
go/ql/src/CallGraph.ql Normal file
View File

@@ -0,0 +1,15 @@
/**
 * @name Call graph
 * @description Generates the call graph for the program.
 * @kind graph
 * @id go/call-graph
 */
import go
// NOTE(review): `import go` may already bring the data-flow library into
// scope, making this import redundant — confirm before relying on it.
import semmle.go.dataflow.DataFlow
// One graph edge per statically resolved call site: links the call
// expression to its target function. The "semmle.label" key attaches the
// callee's qualified name as the edge label, as expected by graph viewers.
query predicate edges(CallExpr call, Function f, string key, string value) {
  call.getTarget() = f and
  key = "semmle.label" and
  value = f.getQualifiedName()
}

12
ruby/ql/src/Sanitizers.ql Normal file
View File

@@ -0,0 +1,12 @@
/**
 * @name Sanitizers
 * @id rb/meta/sanitizers
 * @kind problem
 * @problem.severity info
 */
import codeql.ruby.DataFlow
import codeql.ruby.security.XSS
// Meta query: surface every stored-XSS sanitizer modeled by the XSS
// library, restricted to call nodes so each alert points at a concrete
// sanitizing call site in the source.
from StoredXss::Sanitizer s
where s instanceof DataFlow::CallNode
select s, "XSS sanitizer"

View File

@@ -1,5 +1,5 @@
use itertools::Itertools;
use ra_ap_base_db::{EditionedFileId, RootQueryDb, SourceDatabase};
use ra_ap_base_db::{EditionedFileId, FileText, RootQueryDb, SourceDatabase};
use ra_ap_hir::Semantics;
use ra_ap_ide_db::RootDatabase;
use ra_ap_load_cargo::{LoadCargoConfig, load_workspace_at};
@@ -7,7 +7,6 @@ use ra_ap_paths::{AbsPath, Utf8PathBuf};
use ra_ap_project_model::ProjectManifest;
use ra_ap_project_model::{CargoConfig, ManifestPath};
use ra_ap_span::Edition;
use ra_ap_span::EditionedFileId as SpanEditionedFileId;
use ra_ap_span::TextRange;
use ra_ap_span::TextSize;
use ra_ap_syntax::SourceFile;
@@ -54,7 +53,6 @@ impl<'a> RustAnalyzer<'a> {
) -> Option<(RootDatabase, Vfs)> {
let progress = |t| (trace!("progress: {}", t));
let manifest = project.manifest_path();
match load_workspace_at(manifest.as_ref(), config, load_config, &progress) {
Ok((db, vfs, _macro_server)) => Some((db, vfs)),
Err(err) => {
@@ -66,67 +64,70 @@ impl<'a> RustAnalyzer<'a> {
pub fn new(vfs: &'a Vfs, semantics: &'a Semantics<'a, RootDatabase>) -> Self {
RustAnalyzer::WithSemantics { vfs, semantics }
}
pub fn parse(&self, path: &Path) -> ParseResult {
let no_semantics_reason;
fn get_file_data(
&self,
path: &Path,
) -> Result<(&Semantics<RootDatabase>, EditionedFileId, FileText), &str> {
match self {
RustAnalyzer::WithoutSemantics { reason } => Err(reason),
RustAnalyzer::WithSemantics { vfs, semantics } => {
if let Some(file_id) = path_to_file_id(path, vfs) {
if let Ok(input) = std::panic::catch_unwind(|| semantics.db.file_text(file_id))
{
let file_id = EditionedFileId::new(
semantics.db,
SpanEditionedFileId::current_edition(file_id),
);
let source_file = semantics.parse(file_id);
let errors = semantics
.db
.parse_errors(file_id)
.into_iter()
.flat_map(|x| x.to_vec())
.collect();
return ParseResult {
ast: source_file,
text: input.text(semantics.db),
errors,
semantics_info: Ok(FileSemanticInformation { file_id, semantics }),
};
}
debug!(
"No text available for file_id '{:?}', falling back to loading file '{}' from disk.",
file_id,
path.to_string_lossy()
);
no_semantics_reason = "no text available for the file in the project";
} else {
no_semantics_reason = "file not found in project";
}
}
RustAnalyzer::WithoutSemantics { reason } => {
no_semantics_reason = reason;
let file_id = path_to_file_id(path, vfs).ok_or("file not found in project")?;
let input = std::panic::catch_unwind(|| semantics.db.file_text(file_id))
.or(Err("no text available for the file in the project"))?;
let editioned_file_id = semantics
.attach_first_edition(file_id)
.ok_or("failed to determine rust edition")?;
Ok((
semantics,
EditionedFileId::new(semantics.db, editioned_file_id),
input,
))
}
}
let mut errors = Vec::new();
let input = match std::fs::read(path) {
Ok(data) => data,
Err(e) => {
errors.push(SyntaxError::new(
format!("Could not read {}: {}", path.to_string_lossy(), e),
TextRange::empty(TextSize::default()),
));
vec![]
}
};
let (input, err) = from_utf8_lossy(&input);
}
let parse = ra_ap_syntax::ast::SourceFile::parse(&input, Edition::CURRENT);
errors.extend(parse.errors());
errors.extend(err);
ParseResult {
ast: parse.tree(),
text: input.as_ref().into(),
errors,
semantics_info: Err(no_semantics_reason),
pub fn parse(&self, path: &Path) -> ParseResult {
match self.get_file_data(path) {
Ok((semantics, file_id, input)) => {
let source_file = semantics.parse(file_id);
let errors = semantics
.db
.parse_errors(file_id)
.into_iter()
.flat_map(|x| x.to_vec())
.collect();
ParseResult {
ast: source_file,
text: input.text(semantics.db),
errors,
semantics_info: Ok(FileSemanticInformation { file_id, semantics }),
}
}
Err(reason) => {
let mut errors = Vec::new();
let input = match std::fs::read(path) {
Ok(data) => data,
Err(e) => {
errors.push(SyntaxError::new(
format!("Could not read {}: {}", path.to_string_lossy(), e),
TextRange::empty(TextSize::default()),
));
vec![]
}
};
let (input, err) = from_utf8_lossy(&input);
let parse = ra_ap_syntax::ast::SourceFile::parse(&input, Edition::CURRENT);
errors.extend(parse.errors());
errors.extend(err);
ParseResult {
ast: parse.tree(),
text: input.as_ref().into(),
errors,
semantics_info: Err(reason),
}
}
}
}
}
@@ -173,8 +174,10 @@ impl TomlReader {
}
fn workspace_members_match(workspace_dir: &AbsPath, members: &[String], target: &AbsPath) -> bool {
members.iter().any(|p| {
glob::Pattern::new(workspace_dir.join(p).as_str()).is_ok_and(|p| p.matches(target.as_str()))
target.strip_prefix(workspace_dir).is_some_and(|rel_path| {
members
.iter()
.any(|p| glob::Pattern::new(p).is_ok_and(|p| p.matches(rel_path.as_str())))
})
}

View File

@@ -2,13 +2,35 @@ import pytest
import json
import commands
import pathlib
import tomllib
@pytest.fixture(params=[2018, 2021, 2024])
def rust_edition(request):
return request.param
@pytest.fixture
def cargo(cwd):
assert (cwd / "Cargo.toml").exists()
def cargo(cwd, rust_edition):
manifest_file = cwd / "Cargo.toml"
assert manifest_file.exists()
(cwd / "rust-project.json").unlink(missing_ok=True)
def update(file):
contents = file.read_text()
m = tomllib.loads(contents)
if 'package' in m:
# tomllib does not support writing, and we don't want to use further dependencies
# so we just do a dumb search and replace
contents = contents.replace(f'edition = "{m["package"]["edition"]}"', f'edition = "{rust_edition}"')
file.write_text(contents)
if 'members' in m.get('workspace', ()):
for member in m['workspace']['members']:
update(file.parent / member / "Cargo.toml")
update(manifest_file)
@pytest.fixture(scope="session")
def rust_sysroot_src() -> str:
rust_sysroot = pathlib.Path(commands.run("rustc --print sysroot", _capture=True))
@@ -16,15 +38,19 @@ def rust_sysroot_src() -> str:
assert ret.exists()
return str(ret)
@pytest.fixture
def rust_project(cwd, rust_sysroot_src):
def rust_project(cwd, rust_sysroot_src, rust_edition):
project_file = cwd / "rust-project.json"
assert project_file.exists()
project = json.loads(project_file.read_text())
project["sysroot_src"] = rust_sysroot_src
for c in project["crates"]:
c["edition"] = str(rust_edition)
project_file.write_text(json.dumps(project, indent=4))
(cwd / "Cargo.toml").unlink(missing_ok=True)
@pytest.fixture
def rust_check_diagnostics(check_diagnostics):
check_diagnostics.redact += [

View File

@@ -2,6 +2,6 @@
[package]
name = "hello-cargo"
version = "0.1.0"
edition = "2021"
edition = "2021" # replaced in test
[dependencies]

View File

@@ -1,7 +1,7 @@
[package]
name = "exe"
version = "0.1.0"
edition = "2021"
edition = "2021" # replaced in test
[dependencies]
lib = { path = "../lib" }

View File

@@ -1,6 +1,6 @@
[package]
name = "lib"
version = "0.1.0"
edition = "2021"
edition = "2021" # replaced in test
[dependencies]

View File

@@ -21,4 +21,4 @@
"deps": []
}
]
}
}

View File

@@ -1,6 +1,5 @@
import pytest
@pytest.mark.ql_test("steps.ql", expected=".cargo.expected")
@pytest.mark.ql_test("summary.qlref", expected=".cargo.expected")
def test_cargo(codeql, rust, cargo, check_source_archive, rust_check_diagnostics):