wip: reorganized to go pkg structure
This commit is contained in:
committed by
=Michael Hohn
parent
198453ee90
commit
9750eeab20
4
pkg/agent/agent.go
Normal file
4
pkg/agent/agent.go
Normal file
@@ -0,0 +1,4 @@
|
||||
// Package agent holds the analysis-runner abstraction used by the
// server state.
package agent

// RunnerSingle is a placeholder for a single-process Runner
// implementation; it has no fields or methods yet.
type RunnerSingle struct {
}
|
||||
4
pkg/agent/interfaces.go
Normal file
4
pkg/agent/interfaces.go
Normal file
@@ -0,0 +1,4 @@
|
||||
package agent

// Runner is the interface for the component that runs analysis jobs.
// It is currently an empty placeholder; methods will be added as the
// package reorganization settles.
type Runner interface {
}
|
||||
4
pkg/common/interfaces.go
Normal file
4
pkg/common/interfaces.go
Normal file
@@ -0,0 +1,4 @@
|
||||
// Package common holds types shared across the mrvacommander packages.
package common

// Common is a placeholder interface with no methods yet.
type Common interface {
}
|
||||
4
pkg/logger/interfaces.go
Normal file
4
pkg/logger/interfaces.go
Normal file
@@ -0,0 +1,4 @@
|
||||
// Package logger holds the logging abstraction used by the server
// state.
package logger

// Logger is the logging interface; currently an empty placeholder.
type Logger interface {
}
|
||||
4
pkg/logger/types.go
Normal file
4
pkg/logger/types.go
Normal file
@@ -0,0 +1,4 @@
|
||||
package logger

// LoggerSingle is a placeholder for a single-process Logger
// implementation; it has no fields or methods yet.
type LoggerSingle struct {
}
|
||||
4
pkg/queue/interfaces.go
Normal file
4
pkg/queue/interfaces.go
Normal file
@@ -0,0 +1,4 @@
|
||||
// Package queue holds the job-queue abstraction used by the server
// state.
package queue

// Queue is the job-queue interface; currently an empty placeholder.
type Queue interface {
}
|
||||
4
pkg/queue/types.go
Normal file
4
pkg/queue/types.go
Normal file
@@ -0,0 +1,4 @@
|
||||
package queue

// QueueSingle is a placeholder for a single-process Queue
// implementation; it has no fields or methods yet.
type QueueSingle struct {
}
|
||||
3
pkg/server/interfaces.go
Normal file
3
pkg/server/interfaces.go
Normal file
@@ -0,0 +1,3 @@
|
||||
// Package server implements the MRVA HTTP endpoints.
package server

// Commander is the interface satisfied by the HTTP endpoint handler
// (see CommanderSingle).  It is currently an empty marker interface.
type Commander interface{}
|
||||
354
pkg/server/server.go
Normal file
354
pkg/server/server.go
Normal file
@@ -0,0 +1,354 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/hohn/ghes-mirva-server/analyze"
|
||||
"github.com/hohn/ghes-mirva-server/api"
|
||||
co "github.com/hohn/ghes-mirva-server/common"
|
||||
"github.com/hohn/ghes-mirva-server/store"
|
||||
)
|
||||
|
||||
// Run is the Commander's main loop.  It is currently a no-op
// placeholder.
func (c *CommanderSingle) Run() {
}
|
||||
|
||||
// Setup stores the shared server state, registers all HTTP routes,
// and starts serving on port 8080.  It does not return:
// ListenAndServe blocks, and log.Fatal exits the process if the
// server stops with an error.
func (c *CommanderSingle) Setup(st *State) {
	r := mux.NewRouter()
	c.st = st

	//
	// First are the API endpoints that mirror those used in the github API
	//
	r.HandleFunc("/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses", c.MirvaRequest)
	// /repos/hohn /mirva-controller/code-scanning/codeql/variant-analyses
	// Or via
	r.HandleFunc("/{repository_id}/code-scanning/codeql/variant-analyses", c.MirvaRequestID)

	r.HandleFunc("/", c.RootHandler)

	// This is the standalone status request.
	// It's also the first request made when downloading; the difference is on the
	// client side's handling.
	r.HandleFunc("/repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}", c.MirvaStatus)

	r.HandleFunc("/repos/{controller_owner}/{controller_repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}/repos/{repo_owner}/{repo_name}", c.MirvaDownloadArtifact)

	// Not implemented:
	// r.HandleFunc("/codeql-query-console/codeql-variant-analysis-repo-tasks/{codeql_variant_analysis_id}/{repo_id}/{owner_id}/{controller_repo_id}", MirvaDownLoad3)
	// r.HandleFunc("/github-codeql-query-console-prod/codeql-variant-analysis-repo-tasks/{codeql_variant_analysis_id}/{repo_id}", MirvaDownLoad4)

	//
	// Now some support API endpoints
	//
	// The {local_path:.*} pattern matches slashes, so any path below
	// the download root can be requested.
	r.HandleFunc("/download-server/{local_path:.*}", c.MirvaDownloadServe)

	//
	// Bind to a port and pass our router in
	//
	// NOTE(review): the port is hard-coded; consider moving it into
	// State.
	log.Fatal(http.ListenAndServe(":8080", r))
}
|
||||
|
||||
// StatusResponse assembles the status of variant analysis js and
// writes it to w as JSON.  Per-repository status is read from the
// store for every job recorded under js.ID; vaid is used only for
// logging.
func (c *CommanderSingle) StatusResponse(w http.ResponseWriter, js co.JobSpec, ji co.JobInfo, vaid int) {
	slog.Debug("Submitting status response", "session", vaid)

	all_scanned := []api.ScannedRepo{}
	jobs := store.GetJobList(js.ID)
	for _, job := range jobs {
		astat := store.GetStatus(js.ID, job.ORL).ToExternalString()
		all_scanned = append(all_scanned,
			api.ScannedRepo{
				Repository: api.Repository{
					ID:              0,
					Name:            job.ORL.Repo,
					FullName:        fmt.Sprintf("%s/%s", job.ORL.Owner, job.ORL.Repo),
					Private:         false,
					StargazersCount: 0,
					UpdatedAt:       ji.UpdatedAt,
				},
				AnalysisStatus:    astat,
				ResultCount:       123, // FIXME 123 is a lie so the client downloads
				ArtifactSizeBytes: 123, // FIXME
			},
		)
	}

	// Overall status is taken from the job of the submitting
	// owner/repo pair.
	astat := store.GetStatus(js.ID, js.OwnerRepo).ToExternalString()

	status := api.StatusResponse{
		SessionId:            js.ID,
		ControllerRepo:       api.ControllerRepo{},
		Actor:                api.Actor{},
		QueryLanguage:        ji.QueryLanguage,
		QueryPackURL:         "", // FIXME
		CreatedAt:            ji.CreatedAt,
		UpdatedAt:            ji.UpdatedAt,
		ActionsWorkflowRunID: 0, // FIXME
		Status:               astat,
		ScannedRepositories:  all_scanned,
		SkippedRepositories:  ji.SkippedRepositories,
	}

	// Encode the response as JSON
	submitStatus, err := json.Marshal(status)
	if err != nil {
		slog.Error("Error encoding response as JSON:",
			"error", err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Send analysisReposJSON via ResponseWriter
	w.Header().Set("Content-Type", "application/json")
	w.Write(submitStatus)
}
|
||||
|
||||
func (c *CommanderSingle) RootHandler(w http.ResponseWriter, r *http.Request) {
|
||||
slog.Info("Request on /")
|
||||
}
|
||||
|
||||
func (c *CommanderSingle) MirvaStatus(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
slog.Info("mrva status request for ",
|
||||
"owner", vars["owner"],
|
||||
"repo", vars["repo"],
|
||||
"codeql_variant_analysis_id", vars["codeql_variant_analysis_id"])
|
||||
id, err := strconv.Atoi(vars["codeql_variant_analysis_id"])
|
||||
if err != nil {
|
||||
slog.Error("Variant analysis is is not integer", "id",
|
||||
vars["codeql_variant_analysis_id"])
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
// The status reports one status for all jobs belonging to an id.
|
||||
// So we simply report the status of a job as the status of all.
|
||||
spec := store.GetJobList(id)
|
||||
if spec == nil {
|
||||
slog.Error("No jobs found for given job id",
|
||||
"id", vars["codeql_variant_analysis_id"])
|
||||
http.Error(w, err.Error(), http.StatusUnprocessableEntity)
|
||||
return
|
||||
}
|
||||
|
||||
job := spec[0]
|
||||
|
||||
js := co.JobSpec{
|
||||
ID: job.QueryPackId,
|
||||
OwnerRepo: job.ORL,
|
||||
}
|
||||
|
||||
ji := store.GetJobInfo(js)
|
||||
|
||||
analyze.StatusResponse(w, js, ji, id)
|
||||
c.StatusResponse(w, js, ji, id)
|
||||
}
|
||||
|
||||
// Download artifacts
|
||||
func (c *CommanderSingle) MirvaDownloadArtifact(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
slog.Info("MRVA artifact download",
|
||||
"controller_owner", vars["controller_owner"],
|
||||
"controller_repo", vars["controller_repo"],
|
||||
"codeql_variant_analysis_id", vars["codeql_variant_analysis_id"],
|
||||
"repo_owner", vars["repo_owner"],
|
||||
"repo_name", vars["repo_name"],
|
||||
)
|
||||
vaid, err := strconv.Atoi(vars["codeql_variant_analysis_id"])
|
||||
if err != nil {
|
||||
slog.Error("Variant analysis is is not integer", "id",
|
||||
vars["codeql_variant_analysis_id"])
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
js := co.JobSpec{
|
||||
ID: vaid,
|
||||
OwnerRepo: co.OwnerRepo{
|
||||
Owner: vars["repo_owner"],
|
||||
Repo: vars["repo_name"],
|
||||
},
|
||||
}
|
||||
analyze.DownloadResponse(w, js, vaid)
|
||||
|
||||
}
|
||||
|
||||
func (c *CommanderSingle) MirvaDownloadServe(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
slog.Info("File download request", "local_path", vars["local_path"])
|
||||
|
||||
analyze.FileDownload(w, vars["local_path"])
|
||||
}
|
||||
|
||||
func (c *CommanderSingle) MirvaRequestID(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
slog.Info("New mrva using repository_id=%v\n", vars["repository_id"])
|
||||
}
|
||||
|
||||
// MirvaRequest handles a new variant-analysis submission for
// {owner}/{repo}.  It allocates a session id, parses the request body
// (query pack, language, repository list), classifies the requested
// repositories by CodeQL-database availability, and hands the result
// to submit_response.
func (c *CommanderSingle) MirvaRequest(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	slog.Info("New mrva run ", "owner", vars["owner"], "repo", vars["repo"])
	// session := new(MirvaSession)
	session_id := c.st.Storage.NextID()
	session_owner := vars["owner"]
	session_controller_repo := vars["repo"]
	// NOTE(review): the arguments below are not key/value paired as
	// slog expects (trailing bare values become !BADKEY) — verify.
	slog.Info("new run", "id: ", fmt.Sprint(session_id), session_owner, session_controller_repo)

	// collectRequestInfo reports errors to the client itself, so a
	// failure here only needs an early return.
	session_language, session_repositories, session_tgz_ref, err := c.collectRequestInfo(w, r, session_id)

	if err != nil {
		return
	}

	not_found_repos, analysisRepos := c.st.Storage.FindAvailableDBs(session_repositories)

	// TODO into Queue
	// session_start_analyses()

	// TODO into Commander (here)
	si := SessionInfo{
		ID:             session_id,
		Owner:          session_owner,
		ControllerRepo: session_controller_repo,

		QueryPack:    session_tgz_ref,
		Language:     session_language,
		Repositories: session_repositories,

		AccessMismatchRepos: nil, /* FIXME */
		NotFoundRepos:       not_found_repos,
		NoCodeqlDBRepos:     nil, /* FIXME */
		OverLimitRepos:      nil, /* FIXME */

		AnalysisRepos: analysisRepos,
	}

	c.submit_response(si)

	// TODO into Storage
	// session_save()

}
|
||||
// submit_response sends the reply for a newly created session back to
// the client.  Not implemented yet.
func (c *CommanderSingle) submit_response(s SessionInfo) {
	// TODO
}
|
||||
|
||||
func (c *CommanderSingle) collectRequestInfo(w http.ResponseWriter, r *http.Request, sessionId int) (string, []co.OwnerRepo, string, error) {
|
||||
slog.Debug("Collecting session info")
|
||||
|
||||
if r.Body == nil {
|
||||
err := errors.New("Missing request body")
|
||||
log.Println(err)
|
||||
http.Error(w, err.Error(), http.StatusNoContent)
|
||||
return "", []co.OwnerRepo{}, "", err
|
||||
}
|
||||
buf, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
var w http.ResponseWriter
|
||||
slog.Error("Error reading MRVA submission body", "error", err.Error())
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return "", []co.OwnerRepo{}, "", err
|
||||
}
|
||||
msg, err := TrySubmitMsg(buf)
|
||||
if err != nil {
|
||||
// Unknown message
|
||||
slog.Error("Unknown MRVA submission body format")
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return "", []co.OwnerRepo{}, "", err
|
||||
}
|
||||
// Decompose the SubmitMsg and keep information
|
||||
|
||||
// Save the query pack and keep the location
|
||||
if !isBase64Gzip([]byte(msg.QueryPack)) {
|
||||
slog.Error("MRVA submission body querypack has invalid format")
|
||||
err := errors.New("MRVA submission body querypack has invalid format")
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return "", []co.OwnerRepo{}, "", err
|
||||
}
|
||||
session_tgz_ref, err := c.extract_tgz(msg.QueryPack, sessionId)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return "", []co.OwnerRepo{}, "", err
|
||||
}
|
||||
|
||||
// 2. Save the language
|
||||
session_language := msg.Language
|
||||
|
||||
// 3. Save the repositories
|
||||
var session_repositories []co.OwnerRepo
|
||||
|
||||
for _, v := range msg.Repositories {
|
||||
t := strings.Split(v, "/")
|
||||
if len(t) != 2 {
|
||||
slog.Error("Invalid owner / repository entry", "entry", t)
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
}
|
||||
session_repositories = append(session_repositories,
|
||||
co.OwnerRepo{Owner: t[0], Repo: t[1]})
|
||||
}
|
||||
return session_language, session_repositories, session_tgz_ref, nil
|
||||
}
|
||||
|
||||
// Try to extract a SubmitMsg from a json-encoded buffer
|
||||
func TrySubmitMsg(buf []byte) (SubmitMsg, error) {
|
||||
buf1 := make([]byte, len(buf))
|
||||
copy(buf1, buf)
|
||||
dec := json.NewDecoder(bytes.NewReader(buf1))
|
||||
dec.DisallowUnknownFields()
|
||||
var m SubmitMsg
|
||||
err := dec.Decode(&m)
|
||||
return m, err
|
||||
}
|
||||
|
||||
// Some important payloads can be listed via
//
//	base64 -d < foo1 | gunzip | tar t | head -20
//
// isBase64Gzip reports whether val begins with the base64 encoding of
// a gzip stream: it decodes the first four base64 characters and
// looks for the gzip magic bytes 0x1f 0x8b.
func isBase64Gzip(val []byte) bool {
	if len(val) < 4 {
		return false
	}
	hdr := make([]byte, base64.StdEncoding.DecodedLen(4))
	if _, err := base64.StdEncoding.Decode(hdr, val[:4]); err != nil {
		log.Println("WARNING: IsBase64Gzip decode error:", err)
		return false
	}
	return bytes.Equal(hdr[:2], []byte{0x1f, 0x8b})
}
|
||||
|
||||
func (c *CommanderSingle) extract_tgz(qp string, sessionID int) (string, error) {
|
||||
// These are decoded manually via
|
||||
// base64 -d < foo1 | gunzip | tar t | head -20
|
||||
// base64 decode the body
|
||||
slog.Debug("Extracting query pack")
|
||||
|
||||
tgz, err := base64.StdEncoding.DecodeString(qp)
|
||||
if err != nil {
|
||||
slog.Error("querypack body decoding error:", err)
|
||||
return "", err
|
||||
}
|
||||
|
||||
session_query_pack_tgz_filepath, err := c.st.Storage.SaveQueryPack(tgz, sessionID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return session_query_pack_tgz_filepath, err
|
||||
}
|
||||
257
pkg/server/types.go
Normal file
257
pkg/server/types.go
Normal file
@@ -0,0 +1,257 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"mrvacommander/pkg/agent"
|
||||
"mrvacommander/pkg/logger"
|
||||
"mrvacommander/pkg/queue"
|
||||
"mrvacommander/pkg/storage"
|
||||
|
||||
co "github.com/hohn/ghes-mirva-server/common"
|
||||
)
|
||||
|
||||
// DownloadResponse is the JSON body returned for an artifact
// download-status request, mirroring the GitHub MRVA API shape.
type DownloadResponse struct {
	Repository           DownloadRepo `json:"repository"`
	AnalysisStatus       string       `json:"analysis_status"`
	ResultCount          int          `json:"result_count"`
	ArtifactSizeBytes    int          `json:"artifact_size_in_bytes"`
	DatabaseCommitSha    string       `json:"database_commit_sha"`
	SourceLocationPrefix string       `json:"source_location_prefix"`
	ArtifactURL          string       `json:"artifact_url"`
}
|
||||
|
||||
// DownloadRepo is the full repository record embedded in a
// DownloadResponse, mirroring the GitHub API repository object
// (identity fields, owner, and the per-resource URL templates).
type DownloadRepo struct {
	ID       int    `json:"id"`
	NodeID   string `json:"node_id"`
	Name     string `json:"name"`
	FullName string `json:"full_name"`
	Private  bool   `json:"private"`
	Owner    Actor  `json:"owner"`

	HTMLURL          string `json:"html_url"`
	Description      string `json:"description"`
	Fork             bool   `json:"fork"`
	ForksURL         string `json:"forks_url"`
	KeysURL          string `json:"keys_url"`
	CollaboratorsURL string `json:"collaborators_url"`
	TeamsURL         string `json:"teams_url"`
	HooksURL         string `json:"hooks_url"`
	IssueEventsURL   string `json:"issue_events_url"`
	EventsURL        string `json:"events_url"`

	AssigneesURL string `json:"assignees_url"`
	BranchesURL  string `json:"branches_url"`
	TagsURL      string `json:"tags_url"`
	BlobsURL     string `json:"blobs_url"`
	GitTagsURL   string `json:"git_tags_url"`
	GitRefsURL   string `json:"git_refs_url"`
	TreesURL     string `json:"trees_url"`
	StatusesURL  string `json:"statuses_url"`
	LanguagesURL string `json:"languages_url"`

	StargazersURL   string `json:"stargazers_url"`
	ContributorsURL string `json:"contributors_url"`
	SubscribersURL  string `json:"subscribers_url"`
	SubscriptionURL string `json:"subscription_url"`

	CommitsURL       string `json:"commits_url"`
	GitCommitsURL    string `json:"git_commits_url"`
	CommentsURL      string `json:"comments_url"`
	IssueCommentURL  string `json:"issue_comment_url"`
	ContentsURL      string `json:"contents_url"`
	CompareURL       string `json:"compare_url"`
	MergesURL        string `json:"merges_url"`
	ArchiveURL       string `json:"archive_url"`
	DownloadsURL     string `json:"downloads_url"`
	IssuesURL        string `json:"issues_url"`
	PullsURL         string `json:"pulls_url"`
	MilestonesURL    string `json:"milestones_url"`
	NotificationsURL string `json:"notifications_url"`
	LabelsURL        string `json:"labels_url"`
	ReleasesURL      string `json:"releases_url"`
	DeploymentsURL   string `json:"deployments_url"`
}
|
||||
|
||||
// ControllerRepo is the controller-repository record embedded in
// submit/status responses.  It matches DownloadRepo's shape except
// that Owner is an empty struct.
type ControllerRepo struct {
	ID       int      `json:"id"`
	NodeID   string   `json:"node_id"`
	Name     string   `json:"name"`
	FullName string   `json:"full_name"`
	Private  bool     `json:"private"`
	Owner    struct{} `json:"owner"`

	HTMLURL          string `json:"html_url"`
	Description      string `json:"description"`
	Fork             bool   `json:"fork"`
	ForksURL         string `json:"forks_url"`
	KeysURL          string `json:"keys_url"`
	CollaboratorsURL string `json:"collaborators_url"`
	TeamsURL         string `json:"teams_url"`
	HooksURL         string `json:"hooks_url"`
	IssueEventsURL   string `json:"issue_events_url"`
	EventsURL        string `json:"events_url"`

	AssigneesURL string `json:"assignees_url"`
	BranchesURL  string `json:"branches_url"`
	TagsURL      string `json:"tags_url"`
	BlobsURL     string `json:"blobs_url"`
	GitTagsURL   string `json:"git_tags_url"`
	GitRefsURL   string `json:"git_refs_url"`
	TreesURL     string `json:"trees_url"`
	StatusesURL  string `json:"statuses_url"`
	LanguagesURL string `json:"languages_url"`

	StargazersURL   string `json:"stargazers_url"`
	ContributorsURL string `json:"contributors_url"`
	SubscribersURL  string `json:"subscribers_url"`
	SubscriptionURL string `json:"subscription_url"`

	CommitsURL       string `json:"commits_url"`
	GitCommitsURL    string `json:"git_commits_url"`
	CommentsURL      string `json:"comments_url"`
	IssueCommentURL  string `json:"issue_comment_url"`
	ContentsURL      string `json:"contents_url"`
	CompareURL       string `json:"compare_url"`
	MergesURL        string `json:"merges_url"`
	ArchiveURL       string `json:"archive_url"`
	DownloadsURL     string `json:"downloads_url"`
	IssuesURL        string `json:"issues_url"`
	PullsURL         string `json:"pulls_url"`
	MilestonesURL    string `json:"milestones_url"`
	NotificationsURL string `json:"notifications_url"`
	LabelsURL        string `json:"labels_url"`
	ReleasesURL      string `json:"releases_url"`
	DeploymentsURL   string `json:"deployments_url"`
}
|
||||
|
||||
// Actor is a GitHub user/account record as embedded in API
// request and response bodies.
type Actor struct {
	Login      string `json:"login"`
	ID         int    `json:"id"`
	NodeID     string `json:"node_id"`
	AvatarURL  string `json:"avatar_url"`
	GravatarID string `json:"gravatar_id"`

	URL          string `json:"url"`
	HTMLURL      string `json:"html_url"`
	FollowersURL string `json:"followers_url"`
	FollowingURL string `json:"following_url"`
	GistsURL     string `json:"gists_url"`

	StarredURL       string `json:"starred_url"`
	SubscriptionsURL string `json:"subscriptions_url"`
	OrganizationsURL string `json:"organizations_url"`
	ReposURL         string `json:"repos_url"`
	EventsURL        string `json:"events_url"`

	ReceivedEventsURL string `json:"received_events_url"`
	Type              string `json:"type"`
	SiteAdmin         bool   `json:"site_admin"`
}
|
||||
|
||||
// SkippedRepositories mirrors the GitHub MRVA API's breakdown of
// repositories that were not analyzed, grouped by reason.
type SkippedRepositories struct {
	AccessMismatchRepos AccessMismatchRepos `json:"access_mismatch_repos"`
	NotFoundRepos       NotFoundRepos       `json:"not_found_repos"`
	NoCodeqlDBRepos     NoCodeqlDBRepos     `json:"no_codeql_db_repos"`
	OverLimitRepos      OverLimitRepos      `json:"over_limit_repos"`
}

// ignored_repos has the same count+names shape as the structs below.
// NOTE(review): it is unexported and appears unused in this file —
// confirm before removing.
type ignored_repos struct {
	RepositoryCount int      `json:"repository_count"`
	Repositories    []string `json:"repositories"`
}

// AccessMismatchRepos lists repositories skipped because of an
// access mismatch.
type AccessMismatchRepos struct {
	RepositoryCount int      `json:"repository_count"`
	Repositories    []string `json:"repositories"`
}

// NotFoundRepos lists repositories that could not be found.  Note
// the different JSON key for the name list.
type NotFoundRepos struct {
	RepositoryCount     int      `json:"repository_count"`
	RepositoryFullNames []string `json:"repository_full_names"`
}

// NoCodeqlDBRepos lists repositories without a CodeQL database.
type NoCodeqlDBRepos struct {
	RepositoryCount int      `json:"repository_count"`
	Repositories    []string `json:"repositories"`
}

// OverLimitRepos lists repositories skipped for exceeding limits.
type OverLimitRepos struct {
	RepositoryCount int      `json:"repository_count"`
	Repositories    []string `json:"repositories"`
}
|
||||
|
||||
// SubmitResponse is the JSON reply to a new variant-analysis
// submission, mirroring the GitHub MRVA API shape.
type SubmitResponse struct {
	ID                  int                 `json:"id"`
	ControllerRepo      ControllerRepo      `json:"controller_repo"`
	Actor               Actor               `json:"actor"`
	QueryLanguage       string              `json:"query_language"`
	QueryPackURL        string              `json:"query_pack_url"`
	CreatedAt           string              `json:"created_at"`
	UpdatedAt           string              `json:"updated_at"`
	Status              string              `json:"status"`
	SkippedRepositories SkippedRepositories `json:"skipped_repositories"`
}
|
||||
|
||||
// Repository is the trimmed repository record embedded in
// ScannedRepo entries.
type Repository struct {
	ID              int    `json:"id"`
	Name            string `json:"name"`
	FullName        string `json:"full_name"`
	Private         bool   `json:"private"`
	StargazersCount int    `json:"stargazers_count"`
	UpdatedAt       string `json:"updated_at"`
}

// ScannedRepo reports per-repository analysis status and result size
// within a StatusResponse.
type ScannedRepo struct {
	Repository        Repository `json:"repository"`
	AnalysisStatus    string     `json:"analysis_status"`
	ResultCount       int        `json:"result_count"`
	ArtifactSizeBytes int        `json:"artifact_size_in_bytes"`
}
|
||||
|
||||
// StatusResponse is the JSON reply to a variant-analysis status
// request.
type StatusResponse struct {
	SessionId            int                 `json:"id"`
	ControllerRepo       ControllerRepo      `json:"controller_repo"`
	Actor                Actor               `json:"actor"`
	QueryLanguage        string              `json:"query_language"`
	QueryPackURL         string              `json:"query_pack_url"`
	CreatedAt            string              `json:"created_at"`
	UpdatedAt            string              `json:"updated_at"`
	ActionsWorkflowRunID int                 `json:"actions_workflow_run_id"`
	Status               string              `json:"status"`
	ScannedRepositories  []ScannedRepo       `json:"scanned_repositories"`
	SkippedRepositories  SkippedRepositories `json:"skipped_repositories"`
}

// SubmitMsg is the decoded body of a variant-analysis submission:
// the query pack (base64-encoded tar.gz), its query language, and
// the "owner/repo" strings to run against.
type SubmitMsg struct {
	ActionRepoRef string   `json:"action_repo_ref"`
	Language      string   `json:"language"`
	QueryPack     string   `json:"query_pack"`
	Repositories  []string `json:"repositories"`
}
|
||||
|
||||
// SessionInfo gathers everything known about one variant-analysis
// session: its identity, the submitted inputs, and the repository
// classification produced while locating CodeQL databases.
type SessionInfo struct {
	ID             int    // session id from Storage.NextID
	Owner          string // controller repo owner from the request URL
	ControllerRepo string // controller repo name from the request URL

	QueryPack    string         // path of the saved query-pack archive
	Language     string         // query language of the submission
	Repositories []co.OwnerRepo // repositories requested for analysis

	AccessMismatchRepos []co.OwnerRepo // not yet populated (FIXME in MirvaRequest)
	NotFoundRepos       []co.OwnerRepo // requested repos without a CodeQL database
	NoCodeqlDBRepos     []co.OwnerRepo // not yet populated (FIXME in MirvaRequest)
	OverLimitRepos      []co.OwnerRepo // not yet populated (FIXME in MirvaRequest)

	// Repos that have a database, mapped to its location.
	// NOTE(review): a pointer to a map is unusual in Go (maps are
	// already reference types) — consider a plain map.
	AnalysisRepos *map[co.OwnerRepo]storage.DBLocation
}

// CommanderSingle is the single-process Commander implementation.
// All HTTP handlers hang off it and share the server state st.
type CommanderSingle struct {
	st *State
}

// State wires together the pluggable server components.
type State struct {
	Commander Commander
	Logger    logger.Logger
	Queue     queue.Queue
	Storage   storage.Storage
	Runner    agent.Runner
}
|
||||
1
pkg/storage/interfaces.go
Normal file
1
pkg/storage/interfaces.go
Normal file
@@ -0,0 +1 @@
|
||||
package storage
|
||||
83
pkg/storage/storage.go
Normal file
83
pkg/storage/storage.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
|
||||
co "github.com/hohn/ghes-mirva-server/common"
|
||||
)
|
||||
|
||||
// StorageSingle is a single-process Storage implementation keeping
// its id counter in memory.
type StorageSingle struct {
	CurrentID int // last session id handed out
}

// NextID returns the next unused session id by advancing the counter.
func (s *StorageSingle) NextID() int {
	s.CurrentID++
	return s.CurrentID
}
|
||||
|
||||
func (s *StorageSingle) SaveQueryPack(tgz []byte, sessionId int) (string, error) {
|
||||
// Save the tar.gz body
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
slog.Error("No working directory")
|
||||
panic(err)
|
||||
}
|
||||
|
||||
dirpath := path.Join(cwd, "var", "codeql", "querypacks")
|
||||
if err := os.MkdirAll(dirpath, 0755); err != nil {
|
||||
slog.Error("Unable to create query pack output directory",
|
||||
"dir", dirpath)
|
||||
return "", err
|
||||
}
|
||||
|
||||
fpath := path.Join(dirpath, fmt.Sprintf("qp-%d.tgz", sessionId))
|
||||
err = os.WriteFile(fpath, tgz, 0644)
|
||||
if err != nil {
|
||||
slog.Error("unable to save querypack body decoding error", "path", fpath)
|
||||
return "", err
|
||||
} else {
|
||||
slog.Info("Query pack saved to ", "path", fpath)
|
||||
}
|
||||
|
||||
return fpath, nil
|
||||
}
|
||||
|
||||
// Determine for which repositories codeql databases are available.
|
||||
//
|
||||
// Those will be the analysis_repos. The rest will be skipped.
|
||||
func (s *StorageSingle) FindAvailableDBs(analysisReposRequested []co.OwnerRepo) (not_found_repos []co.OwnerRepo,
|
||||
analysisRepos *map[co.OwnerRepo]DBLocation) {
|
||||
slog.Debug("Looking for available CodeQL databases")
|
||||
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
slog.Error("No working directory")
|
||||
return
|
||||
}
|
||||
|
||||
analysisRepos = &map[co.OwnerRepo]DBLocation{}
|
||||
|
||||
not_found_repos = []co.OwnerRepo{}
|
||||
|
||||
for _, rep := range analysisReposRequested {
|
||||
dbPrefix := filepath.Join(cwd, "codeql", "dbs", rep.Owner, rep.Repo)
|
||||
dbName := fmt.Sprintf("%s_%s_db.zip", rep.Owner, rep.Repo)
|
||||
dbPath := filepath.Join(dbPrefix, dbName)
|
||||
|
||||
if _, err := os.Stat(dbPath); errors.Is(err, fs.ErrNotExist) {
|
||||
slog.Info("Database does not exist for repository ", "owner/repo", rep,
|
||||
"path", dbPath)
|
||||
not_found_repos = append(not_found_repos, rep)
|
||||
} else {
|
||||
slog.Info("Found database for ", "owner/repo", rep, "path", dbPath)
|
||||
(*analysisRepos)[rep] = DBLocation{Prefix: dbPrefix, File: dbName}
|
||||
}
|
||||
}
|
||||
return not_found_repos, analysisRepos
|
||||
}
|
||||
17
pkg/storage/types.go
Normal file
17
pkg/storage/types.go
Normal file
@@ -0,0 +1,17 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
co "github.com/hohn/ghes-mirva-server/common"
|
||||
)
|
||||
|
||||
// Storage is the persistence abstraction used by the server: it hands
// out session ids, stores submitted query packs, and locates CodeQL
// databases for requested repositories.
type Storage interface {
	// NextID returns a fresh session id.
	NextID() int
	// SaveQueryPack stores the raw query-pack archive for a session
	// and returns the path it was stored at.
	SaveQueryPack(tgz []byte, sessionID int) (storagePath string, error error)
	// FindAvailableDBs splits the requested repositories into those
	// without a CodeQL database and those with one, the latter mapped
	// to the database's location.
	FindAvailableDBs(analysisReposRequested []co.OwnerRepo) (not_found_repos []co.OwnerRepo,
		analysisRepos *map[co.OwnerRepo]DBLocation)
}

// DBLocation identifies a stored CodeQL database archive by its
// directory (Prefix) and file name (File).
type DBLocation struct {
	Prefix string
	File   string
}
|
||||
Reference in New Issue
Block a user