Clean up and fix custom logging code

Unrecognized JSON caused duplicate output.

Specifically:
     2024/02/12 18:58:54 WARNING: json decode error: json: unknown field "id"
Michael Hohn
2024-02-13 11:30:14 -08:00
committed by Michael Hohn
parent 2877835899
commit ff94bfa9f5
3 changed files with 832 additions and 832 deletions
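
For reference, a minimal sketch (not part of this commit) of the failure mode quoted above: with DisallowUnknownFields set, a request body carrying an unexpected field such as "id" makes Decode return an error, and the pre-fix code then emitted the body a second time. The submitMsg type here is a reduced, hypothetical stand-in for the SubmitMsg struct introduced in the diff below.

// Minimal sketch (not part of this commit) of the decode error quoted above.
package main

import (
	"bytes"
	"encoding/json"
	"log"
)

// submitMsg is a reduced, hypothetical stand-in for SubmitMsg in main.go.
type submitMsg struct {
	Language  string `json:"language"`
	QueryPack string `json:"query_pack"`
}

func main() {
	body := []byte(`{"id": 7, "language": "cpp", "query_pack": "..."}`)
	dec := json.NewDecoder(bytes.NewReader(body))
	dec.DisallowUnknownFields()
	var m submitMsg
	if err := dec.Decode(&m); err != nil {
		// With DisallowUnknownFields, the unexpected "id" field yields:
		//   json decode error: json: unknown field "id"
		log.Printf("WARNING: json decode error: %s\n", err)
		log.Printf(">> request body: %v", string(body))
		return // as in the fix: log the raw body once, skip the field-by-field output
	}
	log.Printf("language=%q query_pack=%q", m.Language, m.QueryPack)
}

Running this logs the warning and the raw body once, which is the single-output behaviour the commit restores.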


@@ -190,12 +190,12 @@
 # Build it
 go clean
-go build -gcflags="all=-N -l" .   # go build .
+go build .                        # go build -gcflags="all=-N -l" .
 ./gh-mrva -h
 # In log-submit-the-mrva-job.log after edit
-SN=41
+SN=54
 ./gh-mrva submit --language cpp --session mirva-session-$SN \
     --list mirva-list \
     --query /Users/hohn/local/gh-mrva/FlatBuffersFunc.ql >& log-$SN.out &

File diff suppressed because one or more lines are too long

main.go (146 changed lines)

@@ -51,37 +51,7 @@ func main() {
 	cmd.Execute()
 }
-func LogRequestDump(req *http.Request) {
-	log.Printf(">> %s %s", req.Method, req.URL)
-	req.Body = LogBody(req.Body, "request")
-}
-func LogBody(body io.ReadCloser, from string) io.ReadCloser {
-	if body != nil {
-		buf, err := io.ReadAll(body)
-		if err != nil {
-			var w http.ResponseWriter
-			log.Fatalf("Error reading %s body: %v", from, err.Error())
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-			return nil
-		}
-		IsZipFile := func() bool {
-			if len(buf) >= 4 {
-				// The header is []byte{ 0x50, 0x4b, 0x03, 0x04 }
-				magic := []byte{0x50, 0x4b, 0x03, 0x04}
-				if bytes.Equal(buf[0:4], magic) {
-					return true
-				} else {
-					return false
-				}
-			} else {
-				return false
-			}
-		}
-		IsBase64Gzip := func(val []byte) bool {
+func IsBase64Gzip(val []byte) bool {
 	// Some important payloads can be listed via
 	// base64 -d < foo1 | gunzip | tar t|head -20
 	//
@@ -107,7 +77,26 @@ func LogBody(body io.ReadCloser, from string) io.ReadCloser {
 	}
 }
-		MaybeJSON := func() bool {
+func LogRequestDump(req *http.Request) {
+	log.Printf(">> %s %s", req.Method, req.URL)
+	req.Body = LogBody(req.Body, "request")
+}
+func IsZipFile(buf []byte) bool {
+	if len(buf) >= 4 {
+		// The header is []byte{ 0x50, 0x4b, 0x03, 0x04 }
+		magic := []byte{0x50, 0x4b, 0x03, 0x04}
+		if bytes.Equal(buf[0:4], magic) {
+			return true
+		} else {
+			return false
+		}
+	} else {
+		return false
+	}
+}
+func MaybeJSON(buf []byte) bool {
 	if len(buf) >= 4 { // {""} is 4 characters
 		if bytes.Equal(buf[0:2], []byte("{\"")) {
 			return true
@@ -119,47 +108,42 @@ func LogBody(body io.ReadCloser, from string) io.ReadCloser {
 		}
 	}
-		if IsZipFile() {
-			// Show index for pk zip archives
-			buf1 := make([]byte, len(buf))
-			copy(buf1, buf)
-			r, err := zip.NewReader(bytes.NewReader(buf1), int64(len(buf1)))
-			if err != nil {
-				log.Fatal(err)
-			}
-			// defer r.Close()
-			// Print the archive index
-			log.Printf(">> %s body:\n", from)
-			log.Printf("zip file, contents:\n")
-			for _, f := range r.File {
-				log.Printf("\t%s\n", f.Name)
-			}
-		} else if MaybeJSON() {
-			// TODO: show index for encoded query packs in the json <value>:
-			// {..."query_pack": <value>,...}
-			//
-			type Message struct {
-				// FIXME: exact structure
+type SubmitMsg struct {
 	ActionRepoRef string   `json:"action_repo_ref"`
 	Language      string   `json:"language"`
 	QueryPack     string   `json:"query_pack"`
 	Repositories  []string `json:"repositories"`
 }
+func LogBody(body io.ReadCloser, from string) io.ReadCloser {
+	if body != nil {
+		buf, err := io.ReadAll(body)
+		if err != nil {
+			var w http.ResponseWriter
+			log.Fatalf("Error reading %s body: %v", from, err.Error())
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+			return nil
+		}
+		if IsZipFile(buf) {
+			ShowZipIndex(buf, from)
+		} else if MaybeJSON(buf) {
+			// See if the json contains a known message
 			buf1 := make([]byte, len(buf))
 			copy(buf1, buf)
 			dec := json.NewDecoder(bytes.NewReader(buf1))
 			dec.DisallowUnknownFields()
-			var m Message
-			if err := dec.Decode(&m); err == io.EOF {
-				log.Printf(">> %s body: %v", from, string(buf))
-			} else if err != nil {
-				log.Printf("WARNING: json decode error: %s\n", err)
-				log.Printf(">> %s body: %v", from, string(buf))
-			}
+			var m SubmitMsg
+			err := dec.Decode(&m)
+			if err != nil {
+				log.Printf("WARNING: json decode error: %s\n", err)
+				log.Printf(">> %s body: %v", from, string(buf))
+				goto BodyDone
+			}
+			// Print index for encoded query packs in the json <value>:
+			// {..."query_pack": <value>,...}
 			log.Printf(">> %s body:\n", from)
 			log.Printf(" \"%s\": \"%s\"\n", "action_repo_ref", m.ActionRepoRef)
 			log.Printf(" \"%s\": \"%s\"\n", "language", m.Language)
@@ -167,6 +151,22 @@ func LogBody(body io.ReadCloser, from string) io.ReadCloser {
 			// Provide custom logging for encoded, compressed tar file
 			if IsBase64Gzip([]byte(m.QueryPack)) {
+				LogBase64GzippedTar(m)
+			} else {
+				log.Printf(" \"%s\": \"%s\"\n", "query_pack", m.QueryPack)
+			}
+		} else {
+			log.Printf(">> %s body: %v", from, string(buf))
+		}
+	BodyDone:
+		reader := io.NopCloser(bytes.NewBuffer(buf))
+		return reader
+	}
+	return body
+}
+func LogBase64GzippedTar(m SubmitMsg) {
 	// These are decoded manually via
 	// base64 -d < foo1 | gunzip | tar t | head -20
 	// but we need complete logs for inspection and testing.
@@ -174,7 +174,6 @@ func LogBody(body io.ReadCloser, from string) io.ReadCloser {
 	data, err := base64.StdEncoding.DecodeString(m.QueryPack)
 	if err != nil {
 		log.Fatalln("body decoding error:", err)
-		return nil
 	}
 	// gunzip the decoded body
 	gzb := bytes.NewBuffer(data)
@@ -197,17 +196,24 @@ func LogBody(body io.ReadCloser, from string) io.ReadCloser {
 		// TODO: head / tail the listing
 		log.Printf(" %s\n", hdr.Name)
 	}
-		} else {
-			log.Printf(" \"%s\": \"%s\"\n", "query_pack", m.QueryPack)
-		}
-	} else {
-		log.Printf(">> %s body: %v", from, string(buf))
-	}
-	reader := io.NopCloser(bytes.NewBuffer(buf))
-	return reader
-	}
-	return body
 }
+func ShowZipIndex(buf []byte, from string) {
+	buf1 := make([]byte, len(buf))
+	copy(buf1, buf)
+	r, err := zip.NewReader(bytes.NewReader(buf1), int64(len(buf1)))
+	if err != nil {
+		log.Fatal(err)
+	}
+	// Print the archive index
+	log.Printf(">> %s body:\n", from)
+	log.Printf("zip file, contents:\n")
+	for _, f := range r.File {
+		log.Printf("\t%s\n", f.Name)
+	}
+}
 type contextKey struct {
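
A possible usage sketch (not part of this commit, and assuming the same package and imports as main.go, plus "net/http"): the refactored LogRequestDump and LogBody helpers could be attached to an outgoing client through a logging http.RoundTripper such as the hypothetical one below.

// Hypothetical wiring sketch: a RoundTripper that dumps each outgoing request
// with LogRequestDump and re-wraps the response body through LogBody so the
// body remains readable by the caller after being logged.
type loggingTransport struct {
	next http.RoundTripper
}

func (t *loggingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	LogRequestDump(req) // logs method, URL, and body; LogBody re-wraps req.Body
	resp, err := t.next.RoundTrip(req)
	if err == nil && resp != nil {
		resp.Body = LogBody(resp.Body, "response")
	}
	return resp, err
}

// Example: client := &http.Client{Transport: &loggingTransport{next: http.DefaultTransport}}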