diff --git a/.devcontainer/.zshrc b/.devcontainer/.zshrc deleted file mode 100644 index 45b1a63..0000000 --- a/.devcontainer/.zshrc +++ /dev/null @@ -1,13 +0,0 @@ -export ZSH="$HOME/.oh-my-zsh" - -ZSH_THEME="robbyrussell" - -plugins=( - zsh-autosuggestions - zsh-syntax-highlighting - you-should-use - git - golang -) - -source $ZSH/oh-my-zsh.sh diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile deleted file mode 100644 index f9e2e18..0000000 --- a/.devcontainer/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM golang:1.24 - -RUN apt-get update && \ - apt-get install -y zsh tree && \ - rm -rf /var/lib/apt/lists/* - -RUN useradd -m vscode -USER vscode -WORKDIR /workspace - -ENV PATH=$PATH:/usr/local/go/bin:/go/bin - -RUN go install github.com/go-delve/delve/cmd/dlv@latest -RUN curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s -- -b $(go env GOPATH)/bin v2.0.2 - -RUN sh -c "$(wget https://raw.githubusercontent.com/ohmyzsh/ohmyzsh/master/tools/install.sh -O -)" && \ - git clone https://github.com/zsh-users/zsh-autosuggestions ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-autosuggestions && \ - git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting && \ - git clone https://github.com/MichaelAquilina/zsh-you-should-use.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/you-should-use - -COPY --chown=vscode:vscode .zshrc /home/vscode/.zshrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index d1df9fd..b257b01 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,9 +1,10 @@ { "name": "Go", - "dockerFile": "Dockerfile", + "image": "mcr.microsoft.com/devcontainers/go:1.24", // Switch to 1.25 when it's released "remoteUser": "vscode", "shutdownAction": "stopContainer", - "postCreateCommand": "go mod download", + "initializeCommand": "./.devcontainer/initialize.sh", + "postCreateCommand": "./.devcontainer/post-create.sh", "customizations": { "vscode": { "settings": { @@ -17,5 +18,12 @@ }, "mounts": [ "source=go-modules,target=/go,type=volume" // Keep go modules in a volume - ] + ], + "features": { + "ghcr.io/devcontainers-extra/features/zsh-plugins:0": { + "plugins": "git golang zsh-autosuggestions zsh-syntax-highlighting zsh-you-should-use", + "omzPlugins": "https://github.com/zsh-users/zsh-autosuggestions https://github.com/zsh-users/zsh-syntax-highlighting https://github.com/MichaelAquilina/zsh-you-should-use", + "username": "vscode" + } + } } diff --git a/.devcontainer/initialize.sh b/.devcontainer/initialize.sh new file mode 100755 index 0000000..9e60b18 --- /dev/null +++ b/.devcontainer/initialize.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +if [ "$(uname)" == "Darwin" ]; then + CHARLES_APP=$(mdfind "kMDItemCFBundleIdentifier == 'com.xk72.Charles'" | head -n 1) + + if [ -d "$CHARLES_APP" ]; then + rm -rf .certs + mkdir -p .certs + "$CHARLES_APP/Contents/MacOS/Charles" ssl export .certs/charles-ssl.pem + else + echo "Charles is not installed, skipping certificate export." 
+ fi +fi diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh new file mode 100755 index 0000000..ec4b3f3 --- /dev/null +++ b/.devcontainer/post-create.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +go mod download + +sudo cp .certs/charles-ssl.pem /usr/local/share/ca-certificates/charles-ssl-proxying-certificate.crt +sudo update-ca-certificates diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7beaffa --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +# DEVCONTAINER +.certs + +# DEBUG +example-harkit +*.har + +# OS +.DS_Store diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..2e33b1c --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,15 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Debug example/main.go", + "type": "go", + "request": "launch", + "mode": "auto", + "program": "${workspaceFolder}/example/main.go" + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index e5f5ad3..9d78bad 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -4,6 +4,9 @@ "files.autoSave": "onFocusChange", "files.insertFinalNewline": true, "files.trimFinalNewlines": true, + "files.exclude": { + "**/.certs": true + }, // GOLANG SETTINGS: "go.toolsManagement.autoUpdate": true, "go.useLanguageServer": true, diff --git a/README.md b/README.md index 25389fe..29d60e2 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,34 @@ # HAR file management library -A Golang library for parsing and managing HAR (HTTP Archive) files. Provides easy-to-use structs and functions for loading, inspecting, and manipulating HAR files, making HTTP traffic analysis and debugging simpler. +A Golang library for parsing and managing HAR (HTTP Archive) files. Provides easy-to-use structs and functions for loading, inspecting, and manipulating HAR files, making HTTP traffic analysis and debugging simpler. This library is designed to be used with the following libraries: + +* [`bogdanfin/tls-client`](https://github.com/bogdanfinn/tls-client) +* The standard **`net/http`** library +* Other **custom request/response structures** + +## Purpose + +* Provide a **complete and persistent history of requests and responses**. +* Facilitate **monitoring, tracking, and debugging** of request systems. +* Offer a **1:1 equivalent of HAR exports** produced by tools like **Charles Proxy** or **Proxyman** during SSL proxying. + +## Functional Requirements / Specifications + +* **Multi-source compatibility:** Capture and generate HAR files from **tls-client**, **net/http**, or any **custom structs**. +* **Maximum fidelity to the HAR standard:** Match as closely as possible the format and content produced by **Charles Proxy** (as a reference for export quality). +* **Strict header order preservation:** Maintain the exact order of headers as defined by the TLS layer (which Go does not guarantee by default—requires specific handling). +* **Simplified API:** + * Create a **HAR session** via a dedicated function. + * Add a **request** to the HAR. + * Add the **corresponding response** via a complementary function. + * Explicit handling of **requests without responses** (timeouts, cancellations, network errors). +* **Additional fields beyond the HAR standard:** + * **IP address** used during the TLS connection. 
+ * **Session ID** for session monitoring and tracking. + +## Bonus / Potential Extensions + +* **Monitoring integration:** Native export compatible with **Prometheus / Grafana**, or extract metrics directly from HAR files for real-time visualization. +* **Advanced historization:** Automatic HAR file storage in an **S3 bucket**, with associated **metadata/tags**: + * Final request status: **success**, **failure**, **timeout**, **HTTP 5xx**, etc. + * **Category / service / user tagging** for easier identification. diff --git a/converter/common.go b/converter/common.go new file mode 100644 index 0000000..1b4178e --- /dev/null +++ b/converter/common.go @@ -0,0 +1,79 @@ +package converter + +import ( + "fmt" + "strings" + "time" + + "github.com/Mathious6/harkit/harfile" + http "github.com/bogdanfinn/fhttp" +) + +const ( + DefaultRequestHTTPVersion = "HTTP/2.0" + ContentLengthKey = "Content-Length" + ContentTypeKey = "Content-Type" + CookieKey = "Cookie" + SetCookieKey = "Set-Cookie" + LocationKey = "Location" +) + +func convertCookies(cookies []*http.Cookie) []*harfile.Cookie { + harCookies := make([]*harfile.Cookie, len(cookies)) + for index, cookie := range cookies { + harCookies[index] = &harfile.Cookie{ + Name: cookie.Name, + Value: cookie.Value, + Path: cookie.Path, + Domain: cookie.Domain, + Expires: formatExpires(cookie.Expires), + HTTPOnly: cookie.HttpOnly, + Secure: cookie.Secure, + } + } + return harCookies +} + +func formatExpires(expires time.Time) string { + if expires.IsZero() { + return "" + } + return expires.Format(time.RFC3339Nano) +} + +func convertHeaders(header http.Header, contentLength int64) []*harfile.NVPair { + // By default, client adds Content-Length header later on, so we need to add it here. + // We clone the header to avoid modifying the original one to avoid side effects. + clonedHeader := header.Clone() + if contentLength > 0 && clonedHeader.Get(ContentLengthKey) == "" { + clonedHeader.Set(ContentLengthKey, fmt.Sprintf("%d", contentLength)) + } + + harHeaders := make([]*harfile.NVPair, 0, len(clonedHeader)) + seen := make(map[string]bool) + + // Used to sort headers in HAR file if needed (e.g. 
https://github.com/bogdanfinn/tls-client) + order := clonedHeader.Values(http.HeaderOrderKey) + for _, name := range order { + canonical := http.CanonicalHeaderKey(name) + values := clonedHeader.Values(name) + + if len(values) > 0 { + for _, value := range values { + harHeaders = append(harHeaders, &harfile.NVPair{Name: canonical, Value: value}) + } + seen[canonical] = true + } + } + + for name, values := range clonedHeader { + if seen[name] || strings.EqualFold(name, http.HeaderOrderKey) { + continue + } + for _, value := range values { + harHeaders = append(harHeaders, &harfile.NVPair{Name: name, Value: value}) + } + } + + return harHeaders +} diff --git a/converter/request.go b/converter/request.go new file mode 100644 index 0000000..9c3c258 --- /dev/null +++ b/converter/request.go @@ -0,0 +1,153 @@ +package converter + +import ( + "errors" + "io" + "net/url" + "strings" + + "github.com/Mathious6/harkit/harfile" + http "github.com/bogdanfinn/fhttp" +) + +const ( + applicationXWWWFormURLEncoded = "application/x-www-form-urlencoded" + multipartFormData = "multipart/form-data" + maxMultipartFormDataSize = 32 << 20 // 32 MB limit + + methodKey = ":method" + authorityKey = ":authority" + schemeKey = ":scheme" + pathKey = ":path" + + hostKey = "Host" +) + +func FromHTTPRequest(req *http.Request) (*harfile.Request, error) { + if req == nil { + return nil, errors.New("request cannot be nil") + } + + reqProto := DefaultRequestHTTPVersion // WARNING: req.Proto is not always accurate so we force it. + + protocolHeader := handleRequestProtocolHeader(reqProto, req.Method, *req.URL) + headers := convertHeaders(req.Header, req.ContentLength) + + postData, err := extractRequestPostData(req) + if err != nil { + return nil, err + } + + return &harfile.Request{ + Method: req.Method, + URL: req.URL.String(), + HTTPVersion: reqProto, + Cookies: convertCookies(req.Cookies()), + Headers: append(protocolHeader, headers...), + QueryString: convertRequestQueryParams(req.URL), + PostData: postData, + HeadersSize: -1, + BodySize: req.ContentLength, + }, nil +} + +func handleRequestProtocolHeader(proto string, method string, url url.URL) []*harfile.NVPair { + if proto == "HTTP/2.0" { + return []*harfile.NVPair{ + {Name: methodKey, Value: method}, + {Name: authorityKey, Value: url.Host}, + {Name: schemeKey, Value: url.Scheme}, + {Name: pathKey, Value: url.RequestURI()}, + } + } else { + return []*harfile.NVPair{ + {Name: hostKey, Value: url.Host}, + } + } +} + +func convertRequestQueryParams(u *url.URL) []*harfile.NVPair { + result := make([]*harfile.NVPair, 0) + + for key, values := range u.Query() { + for _, value := range values { + result = append(result, &harfile.NVPair{Name: key, Value: value}) + } + } + + return result +} + +func extractRequestPostData(req *http.Request) (*harfile.PostData, error) { + if req.Body == nil || req.ContentLength == 0 { + return nil, nil + } + + body, err := req.GetBody() + if err != nil { + return nil, err + } + defer body.Close() + + bodyText, err := io.ReadAll(body) + if err != nil { + return nil, err + } + + mimeType := req.Header.Get(ContentTypeKey) + postData := &harfile.PostData{MimeType: mimeType} + + if strings.HasPrefix(mimeType, applicationXWWWFormURLEncoded) { + pairs := strings.SplitSeq(string(bodyText), "&") + + for pair := range pairs { + nv := strings.SplitN(pair, "=", 2) + if len(nv) == 2 { + name, value := nv[0], nv[1] + postData.Params = append(postData.Params, &harfile.Param{Name: name, Value: value}) + } + } + + return postData, nil + } + + if 
strings.HasPrefix(mimeType, multipartFormData) { + err := req.ParseMultipartForm(maxMultipartFormDataSize) + if err != nil { + return nil, err + } + + for name, values := range req.MultipartForm.Value { + for _, value := range values { + postData.Params = append(postData.Params, &harfile.Param{Name: name, Value: value}) + } + } + + for name, files := range req.MultipartForm.File { + for _, fileHeader := range files { + file, err := fileHeader.Open() + if err != nil { + return nil, err + } + defer file.Close() + + content, err := io.ReadAll(file) + if err != nil { + return nil, err + } + + postData.Params = append(postData.Params, &harfile.Param{ + Name: name, + FileName: fileHeader.Filename, + ContentType: fileHeader.Header.Get(ContentTypeKey), + Value: string(content), + }) + } + } + + return postData, nil + } + + postData.Text = string(bodyText) + return postData, nil +} diff --git a/converter/request_test.go b/converter/request_test.go new file mode 100644 index 0000000..d32f1be --- /dev/null +++ b/converter/request_test.go @@ -0,0 +1,238 @@ +package converter_test + +import ( + "bytes" + "io" + "mime/multipart" + "strings" + "testing" + + "github.com/Mathious6/harkit/converter" + http "github.com/bogdanfinn/fhttp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const ( + REQ_METHOD = http.MethodPost + REQ_URL = "https://example.com/api?foo=bar" + REQ_URI = "/api?foo=bar" + REQ_PROTOCOL = "HTTP/2.0" + REQ_PROTOCOL_HEADERS_COUNT = 4 + + REQ_HEADER1_NAME = "Name1" + REQ_HEADER1_VALUE = "value1" + REQ_HEADER2_NAME = "Name2" + REQ_HEADER2_VALUE = "value2" + + REQ_COOKIE_NAME = "name" + REQ_COOKIE_VALUE = "value" + + REQ_URL_CONTENT_TYPE = "application/x-www-form-urlencoded" + REQ_BODY_URL = "foo=bar" + + REQ_JSON_CONTENT_TYPE = "application/json" + REQ_BODY_JSON = `{"foo":"bar"}` + REQ_JSON_CONTENT_LENGTH_VALUE = "13" + + REQ_PART1_NAME = "name1" + REQ_PART1_VALUE = "value1" + REQ_PART2_NAME = "file" + REQ_PART2_VALUE = "content" + REQ_PART2_FILENAME = "test.txt" + REQ_PART2_CONTENT_TYPE = "application/octet-stream" +) + +func TestConverter_GivenNilRequest_WhenConvertingHTTPRequest_ThenErrorShouldBeReturned(t *testing.T) { + req := (*http.Request)(nil) + + result, err := converter.FromHTTPRequest(req) + + assert.Error(t, err, "Error should be returned when request is nil") + assert.Nil(t, result, "HAR should be nil when request is nil") +} + +func TestConverter_GivenMethod_WhenConvertingHTTPRequest_ThenMethodShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Equal(t, REQ_METHOD, result.Method, "HAR method <> request method") +} + +func TestConverter_GivenURL_WhenConvertingHTTPRequest_ThenURLShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Equal(t, REQ_URL, result.URL, "HAR URL <> request URL") +} + +func TestConverter_GivenProtocol_WhenConvertingHTTPRequest_ThenProtocolShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + // assert.Equal(t, REQ_PROTOCOL, result.HTTPVersion, "HAR protocol <> request protocol") + assert.Equal(t, "HTTP/2.0", result.HTTPVersion, "HAR protocol <> request protocol") // WARNING: we force it. 
+} + +func TestConverter_GivenCookies_WhenConvertingHTTPRequest_ThenCookiesShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.Cookies, 1, "HAR should contain 1 cookie") + assert.Equal(t, REQ_COOKIE_NAME, result.Cookies[0].Name, "HAR cookie name <> request cookie name") + assert.Equal(t, REQ_COOKIE_VALUE, result.Cookies[0].Value, "HAR cookie value <> request cookie value") +} + +func TestConverter_GivenHeaders_WhenConvertingHTTPRequest_ThenHeadersShouldBeCorrectAndOrdered(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.Headers, REQ_PROTOCOL_HEADERS_COUNT+3, "HAR should contain 3 headers") + assert.Equal(t, REQ_HEADER2_NAME, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+0].Name, "HAR header name <> request header name") + assert.Equal(t, REQ_HEADER2_VALUE, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+0].Value, "HAR header value <> request header value") + assert.Equal(t, REQ_HEADER1_NAME, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+1].Name, "HAR header name <> request header name") + assert.Equal(t, REQ_HEADER1_VALUE, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+1].Value, "HAR header value <> request header value") +} + +func TestConverter_GivenURLWithQueryString_WhenConvertingHTTPRequest_ThenQueryStringShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.QueryString, 1, "HAR should contain 1 query string parameters") + assert.Equal(t, "foo", result.QueryString[0].Name, "HAR query string name <> request query string name") + assert.Equal(t, "bar", result.QueryString[0].Value, "HAR query string value <> request query string value") +} + +func TestConverter_GivenEmptyBody_WhenConvertingHTTPRequest_ThenContentLengthShouldNotBeSet(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.NotEqual(t, converter.ContentLengthKey, result.Headers[2].Name, "HAR content length header value should not be set") +} + +func TestConverter_GivenURLEncodedBody_WhenConvertingHTTPRequest_ThenPostDataShouldBeCorrect(t *testing.T) { + req := createRequest(t, strings.NewReader(REQ_BODY_URL), REQ_URL_CONTENT_TYPE) + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.PostData.Params, 1, "HAR should contain 1 post data parameters") + assert.Empty(t, result.PostData.Text, "HAR should have no post data text") + assert.Equal(t, "foo", result.PostData.Params[0].Name, "HAR post data name <> request post data name") + assert.Equal(t, "bar", result.PostData.Params[0].Value, "HAR post data value <> request post data value") + assert.Equal(t, REQ_URL_CONTENT_TYPE, result.PostData.MimeType, "HAR post data mime type <> request post data mime type") +} + +func TestConverter_GivenJSONBody_WhenConvertingHTTPRequest_ThenPostDataShouldBeCorrect(t *testing.T) { + req := createRequest(t, strings.NewReader(REQ_BODY_JSON), REQ_JSON_CONTENT_TYPE) + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.PostData.Params, 0, "HAR should contain 0 post data parameters") + assert.NotEmpty(t, result.PostData.Text, "HAR should have post data text") + assert.Equal(t, REQ_BODY_JSON, result.PostData.Text, "HAR post data text <> request post data text") + 
assert.Equal(t, REQ_JSON_CONTENT_TYPE, result.PostData.MimeType, "HAR post data mime type <> request post data mime type") +} + +func TestConverter_GivenJSONBody_WhenConvertingHTTPRequest_ThenContentLengthShouldBeSet(t *testing.T) { + req := createRequest(t, strings.NewReader(REQ_BODY_JSON), REQ_JSON_CONTENT_TYPE) + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Equal(t, converter.ContentLengthKey, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+2].Name, "HAR content length header name <> request content length header name") + assert.Equal(t, REQ_JSON_CONTENT_LENGTH_VALUE, result.Headers[REQ_PROTOCOL_HEADERS_COUNT+2].Value, "HAR content length header value <> request content length header value") +} + +func TestConverter_GivenMultipartBody_WhenConvertingHTTPRequest_ThenPostDataShouldBeCorrect(t *testing.T) { + body, contentType := createMultipartBody() + req := createRequest(t, &body, contentType) + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + assert.Len(t, result.PostData.Params, 2, "HAR should contain 2 post data parameters") + assert.Empty(t, result.PostData.Text, "HAR should have no post data text") + assert.Equal(t, REQ_PART1_NAME, result.PostData.Params[0].Name, "HAR post data name <> request post data name") + assert.Equal(t, REQ_PART1_VALUE, result.PostData.Params[0].Value, "HAR post data value <> request post data value") + assert.Equal(t, REQ_PART2_NAME, result.PostData.Params[1].Name, "HAR post data name <> request post data name") + assert.Equal(t, REQ_PART2_VALUE, result.PostData.Params[1].Value, "HAR post data value <> request post data value") + assert.Equal(t, REQ_PART2_FILENAME, result.PostData.Params[1].FileName, "HAR post data filename <> request post data filename") + assert.Equal(t, REQ_PART2_CONTENT_TYPE, result.PostData.Params[1].ContentType, "HAR post data content type <> request post data content type") + assert.Equal(t, contentType, result.PostData.MimeType, "HAR post data mime type <> request post data mime type") +} + +func TestConverter_GivenHeaders_WhenConvertingHTTPRequest_ThenHeadersSizeShouldBeCorrect(t *testing.T) { + req := createRequest(t, nil, "") + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + // assert.Equal(t, computeHeadersSize(), result.HeadersSize, "HAR header size <> request header size") + assert.Equal(t, int64(-1), result.HeadersSize, "HAR header size <> request header size") // WARNING: we force it. +} + +func TestConverter_GivenBody_WhenConvertingHTTPRequest_ThenBodySizeShouldBeCorrect(t *testing.T) { + req := createRequest(t, strings.NewReader(REQ_BODY_URL), REQ_URL_CONTENT_TYPE) + + result, err := converter.FromHTTPRequest(req) + require.NoError(t, err) + + expectedSize := int64(len(REQ_BODY_URL)) + assert.Equal(t, expectedSize, result.BodySize, "HAR body size <> request body size") +} + +// createRequest creates and returns a new HTTP request with the specified body and content type. +// It sets various headers including cookies, custom headers, and content type if provided. +// The function also ensures the request protocol is set and validates the request creation. 
+func createRequest(t *testing.T, body io.Reader, contentType string) *http.Request { + req, err := http.NewRequest(REQ_METHOD, REQ_URL, body) + require.NoError(t, err) + + req.Proto = REQ_PROTOCOL + + req.Header.Add(converter.CookieKey, REQ_COOKIE_NAME+"="+REQ_COOKIE_VALUE) + req.Header.Add(REQ_HEADER1_NAME, REQ_HEADER1_VALUE) + req.Header.Add(REQ_HEADER2_NAME, REQ_HEADER2_VALUE) + + req.Header.Add(http.HeaderOrderKey, REQ_HEADER2_NAME) + req.Header.Add(http.HeaderOrderKey, REQ_HEADER1_NAME) + req.Header.Add(http.HeaderOrderKey, converter.ContentLengthKey) + + if contentType != "" { + req.Header.Add(converter.ContentTypeKey, contentType) + } + + return req +} + +// createMultipartBody constructs a multipart HTTP request body with predefined fields and file content. +// It returns the body as a bytes.Buffer and the corresponding Content-Type header value. +func createMultipartBody() (body bytes.Buffer, contentType string) { + var buf bytes.Buffer + writer := multipart.NewWriter(&buf) + + _ = writer.WriteField(REQ_PART1_NAME, REQ_PART1_VALUE) + + fileWriter, _ := writer.CreateFormFile(REQ_PART2_NAME, REQ_PART2_FILENAME) + _, _ = fileWriter.Write([]byte(REQ_PART2_VALUE)) + + writer.Close() + + return buf, writer.FormDataContentType() +} diff --git a/converter/response.go b/converter/response.go new file mode 100644 index 0000000..27547ad --- /dev/null +++ b/converter/response.go @@ -0,0 +1,74 @@ +package converter + +import ( + "bytes" + "errors" + "io" + "strconv" + + "github.com/Mathious6/harkit/harfile" + http "github.com/bogdanfinn/fhttp" +) + +func FromHTTPResponse(resp *http.Response) (*harfile.Response, error) { + if resp == nil { + return nil, errors.New("response cannot be nil") + } + + content, err := buildResponseContent(resp) + if err != nil { + return nil, err + } + + protocolHeader := handleResponseProtocolHeader(resp.Proto, resp.StatusCode) + headers := convertHeaders(resp.Header, resp.ContentLength) + + return &harfile.Response{ + Status: int64(resp.StatusCode), + StatusText: http.StatusText(resp.StatusCode), + HTTPVersion: resp.Proto, + Cookies: convertCookies(resp.Cookies()), + Headers: append(protocolHeader, headers...), + Content: content, + RedirectURL: locateRedirectURL(resp), + HeadersSize: -1, + BodySize: content.Size, + }, nil +} + +func handleResponseProtocolHeader(proto string, status int) []*harfile.NVPair { + if proto == "HTTP/2.0" { + return []*harfile.NVPair{ + {Name: ":status", Value: strconv.Itoa(status)}, + } + } else { + return []*harfile.NVPair{} + } +} + +func locateRedirectURL(resp *http.Response) *string { + if resp.StatusCode >= 300 && resp.StatusCode < 400 { + if loc, err := resp.Location(); err == nil { + url := loc.String() + return &url + } + } + return nil +} + +func buildResponseContent(resp *http.Response) (*harfile.Content, error) { + buf, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + defer resp.Body.Close() + resp.Body = io.NopCloser(bytes.NewReader(buf)) + + return &harfile.Content{ + Size: int64(len(buf)), + Compression: 0, + MimeType: resp.Header.Get(ContentTypeKey), + Text: string(buf), + Encoding: "", + }, nil +} diff --git a/converter/response_test.go b/converter/response_test.go new file mode 100644 index 0000000..236fd11 --- /dev/null +++ b/converter/response_test.go @@ -0,0 +1,191 @@ +package converter_test + +import ( + "bytes" + "io" + "testing" + "time" + + "github.com/Mathious6/harkit/converter" + http "github.com/bogdanfinn/fhttp" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +const ( + RESP_OK_CODE = http.StatusOK + RESP_FOUND_CODE = http.StatusFound + RESP_PROTOCOL = "HTTP/1.1" + + RESP_HEADER_NAME = "Name" + RESP_HEADER_VALUE = "value" + + RESP_COOKIE_NAME = "name" + RESP_COOKIE_VALUE = "value" + RESP_COOKIE_PATH = "/" + RESP_COOKIE_DOMAIN = "example.com" + RESP_COOKIE_EXPIRES = "Mon, 12 May 2025 00:00:00 GMT" + RESP_COOKIE_HTTPONLY = true + RESP_COOKIE_SECURE = true + + RESP_BODY_TEXT = "response" + RESP_CONTENT_TYPE = "text/plain" + RESP_JSON_CONTENT_LENGTH_VALUE = "8" + + RESP_LOCATION = "https://example.com/redirect" +) + +func TestConverter_GivenNilResponse_WhenConvertingHTTPResponse_ThenErrorShouldBeReturned(t *testing.T) { + resp := (*http.Response)(nil) + + result, err := converter.FromHTTPResponse(resp) + + assert.Error(t, err, "Error should be returned when response is nil") + assert.Nil(t, result, "HAR should be nil when response is nil") +} + +func TestConverter_GivenStatusCode_WhenConvertingHTTPResponse_ThenStatusShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, int64(resp.StatusCode), result.Status, "HAR status <> response status") +} + +func TestConverter_GivenStatusText_WhenConvertingHTTPResponse_ThenStatusTextShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, http.StatusText(resp.StatusCode), result.StatusText, "HAR status text <> response status text") +} + +func TestConverter_GivenProtocol_WhenConvertingHTTPResponse_ThenProtocolShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, resp.Proto, result.HTTPVersion, "HAR protocol <> response protocol") +} + +func TestConverter_GivenCookies_WhenConvertingHTTPResponse_ThenCookiesShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + exceptedExpires, _ := time.Parse(time.RFC1123, RESP_COOKIE_EXPIRES) + exceptedExpiresStr := exceptedExpires.Format(time.RFC3339Nano) + assert.Len(t, result.Cookies, 1, "HAR should contain 1 cookie") + assert.Equal(t, RESP_COOKIE_NAME, result.Cookies[0].Name, "HAR cookie name <> response cookie name") + assert.Equal(t, RESP_COOKIE_VALUE, result.Cookies[0].Value, "HAR cookie value <> response cookie value") + assert.Equal(t, RESP_COOKIE_PATH, result.Cookies[0].Path, "HAR cookie path <> response cookie path") + assert.Equal(t, RESP_COOKIE_DOMAIN, result.Cookies[0].Domain, "HAR cookie domain <> response cookie domain") + assert.Equal(t, exceptedExpiresStr, result.Cookies[0].Expires, "HAR cookie expires <> response cookie expires") + assert.Equal(t, RESP_COOKIE_HTTPONLY, result.Cookies[0].HTTPOnly, "HAR cookie httpOnly <> response cookie httpOnly") + assert.Equal(t, RESP_COOKIE_SECURE, result.Cookies[0].Secure, "HAR cookie secure <> response cookie secure") +} + +func TestConverter_GivenHeaders_WhenConvertingHTTPResponse_ThenHeadersShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Len(t, result.Headers, 2, "HAR should contain 2 header") + assert.Equal(t, RESP_HEADER_NAME, result.Headers[0].Name, "HAR header name <> 
response header name") + assert.Equal(t, RESP_HEADER_VALUE, result.Headers[0].Value, "HAR header value <> response header value") +} + +func TestConverter_GivenBody_WhenConvertingHTTPResponse_ThenContentShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, bytes.NewBufferString(RESP_BODY_TEXT), RESP_CONTENT_TYPE) + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, int64(len(RESP_BODY_TEXT)), result.Content.Size, "HAR content size <> response body size") + assert.Equal(t, RESP_CONTENT_TYPE, result.Content.MimeType, "HAR content mime type <> response content mime type") + assert.Equal(t, RESP_BODY_TEXT, result.Content.Text, "HAR content text <> response body text") +} + +func TestConverter_GivenRedirect_WhenConvertingHTTPResponse_ThenRedirectURLShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_FOUND_CODE, nil, "") + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, RESP_LOCATION, *result.RedirectURL, "HAR redirect URL <> response location header") +} + +func TestConverter_GivenBody_WhenConvertingHTTPResponse_ThenBodySizeShouldBeCorrect(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, bytes.NewBufferString(RESP_BODY_TEXT), RESP_CONTENT_TYPE) + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Equal(t, int64(len(RESP_BODY_TEXT)), result.BodySize, "HAR body size <> response body size") +} + +func TestConverter_GivenEmptyBody_WhenConvertingHTTPResponse_ThenContentLengthShouldNotBeSet(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, bytes.NewBufferString(""), RESP_CONTENT_TYPE) + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Len(t, result.Headers, 3, "HAR should contain 3 header") +} + +func TestConverter_GivenBody_WhenConvertingHTTPResponse_ThenContentLengthShouldBeSet(t *testing.T) { + resp := createResponse(t, RESP_OK_CODE, bytes.NewBufferString(RESP_BODY_TEXT), RESP_CONTENT_TYPE) + + result, err := converter.FromHTTPResponse(resp) + require.NoError(t, err) + + assert.Len(t, result.Headers, 4, "HAR should contain 4 header") + assert.Equal(t, RESP_JSON_CONTENT_LENGTH_VALUE, result.Headers[3].Value, "HAR content length <> response content length") +} + +func createResponse(t *testing.T, statusCode int, body io.Reader, contentType string) *http.Response { + var buf []byte + var err error + if body != nil { + buf, err = io.ReadAll(body) + require.NoError(t, err) + } + + resp := &http.Response{ + StatusCode: statusCode, + Status: http.StatusText(statusCode), + Proto: RESP_PROTOCOL, + Header: make(http.Header), + ContentLength: int64(len(buf)), + Body: io.NopCloser(bytes.NewBuffer(buf)), + } + + cookie := RESP_COOKIE_NAME + "=" + RESP_COOKIE_VALUE + cookie += ";path=" + RESP_COOKIE_PATH + cookie += ";domain=" + RESP_COOKIE_DOMAIN + cookie += ";expires=" + RESP_COOKIE_EXPIRES + cookie += ";httponly" + cookie += ";secure" + + resp.Header.Append(RESP_HEADER_NAME, RESP_HEADER_VALUE) + resp.Header.Append(converter.SetCookieKey, cookie) + + if contentType != "" { + resp.Header.Append(converter.ContentTypeKey, contentType) + } + + if len(buf) > 0 { + resp.Header.Append(converter.ContentLengthKey, RESP_JSON_CONTENT_LENGTH_VALUE) + } + + if statusCode >= 300 && statusCode < 400 { + resp.Header.Append(converter.LocationKey, RESP_LOCATION) + } + + return resp +} diff --git a/example/go.mod b/example/go.mod new file mode 100644 index 0000000..71a09b6 --- /dev/null +++ b/example/go.mod @@ -0,0 
+1,30 @@ +module github.com/Mathious6/example-harkit + +go 1.24.3 + +require ( + github.com/Mathious6/harkit v0.1.1 + github.com/Mathious6/httpkit v0.0.0-20250723215844-55768b698fa7 + github.com/bogdanfinn/fhttp v0.6.0 +) + +require ( + github.com/Dharmey747/quic-go-utls v1.0.3-utls // indirect + github.com/Mathious6/platekit v1.0.0 // indirect + github.com/andybalholm/brotli v1.1.1 // indirect + github.com/bogdanfinn/utls v1.7.3-barnius // indirect + github.com/cloudflare/circl v1.6.1 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/quic-go/qpack v0.5.1 // indirect + github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 // indirect + go.uber.org/mock v0.5.0 // indirect + golang.org/x/crypto v0.37.0 // indirect + golang.org/x/mod v0.18.0 // indirect + golang.org/x/net v0.39.0 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.32.0 // indirect + golang.org/x/text v0.24.0 // indirect + golang.org/x/tools v0.22.0 // indirect +) + +replace github.com/Mathious6/harkit => ../ diff --git a/example/go.sum b/example/go.sum new file mode 100644 index 0000000..ca72a52 --- /dev/null +++ b/example/go.sum @@ -0,0 +1,46 @@ +github.com/Dharmey747/quic-go-utls v1.0.3-utls h1:wqvk69LgFwT6AtTW14ASpRIJkvCaH21dX/U5ov5STr0= +github.com/Dharmey747/quic-go-utls v1.0.3-utls/go.mod h1:lgQoyZzST8vJJQ84eF9Xi2xJJnujoiNk0FGFEyQonG8= +github.com/Mathious6/httpkit v0.0.0-20250723215844-55768b698fa7 h1:hT8GzwGDjNXxOcaZiYI0gcdwo3h4ylDht5JOAzKAZkA= +github.com/Mathious6/httpkit v0.0.0-20250723215844-55768b698fa7/go.mod h1:ZtgfNJPBEngOGIG/rC30Ei1bWb52doOa1qHeX1NTNNg= +github.com/Mathious6/platekit v1.0.0 h1:XoS0C1KiMKZem8mvD2VE1fvwS/1DT9Vc9u/dVdV69zY= +github.com/Mathious6/platekit v1.0.0/go.mod h1:bMXoVaS2ziTocqDm4rzMFuC8eiUFYpUIa+hXHymt8i8= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/bogdanfinn/fhttp v0.6.0 h1:24JoDnE43tq3RdK99K1M5mxa2JyntKr6WcDsy1KdA0o= +github.com/bogdanfinn/fhttp v0.6.0/go.mod h1:ZR1hRfxsOd/j/C8RnwyNXA90DxkrHB3Y1nuCD1YlbdI= +github.com/bogdanfinn/utls v1.7.3-barnius h1:2p9riIoGHI85eVDebhHm58qLokyJ8bFEn26wg24S1uU= +github.com/bogdanfinn/utls v1.7.3-barnius/go.mod h1:SUn0CoHGVp/akGNuaqh99yvovu64PCP2LbWd3Z/Laic= +github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0= +github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI= +github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tam7t/hpkp v0.0.0-20160821193359-2b70b4024ed5 h1:YqAladjX7xpA6BM04leXMWAEjS0mTZ5kUU9KRBriQJc= +github.com/tam7t/hpkp 
v0.0.0-20160821193359-2b70b4024ed5/go.mod h1:2JjD2zLQYH5HO74y5+aE3remJQvl6q4Sn6aWA2wD1Ng= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +go.uber.org/mock v0.5.0 h1:KAMbZvZPyBPWgD14IrIQ38QCyjwpvVVV6K/bHl1IwQU= +go.uber.org/mock v0.5.0/go.mod h1:ge71pBPLYDk7QIi1LupWxdAykm7KIEFchiOqd6z7qMM= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/example/main.go b/example/main.go new file mode 100644 index 0000000..606ad11 --- /dev/null +++ b/example/main.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "net" + "net/url" + "strings" + "time" + + "github.com/Mathious6/harkit/harhandler" + "github.com/Mathious6/httpkit" + http "github.com/bogdanfinn/fhttp" +) + +const ( + URL = "https://httpbin.org" + PROXY_HOST = "host.docker.internal" + PROXY_PORT = "8888" +) + +func main() { + opts := []httpkit.HttpClientOption{ + httpkit.WithCookieJar(httpkit.NewCookieJar()), + httpkit.WithNotFollowRedirects(), + } + + if isProxyRunning(net.JoinHostPort(PROXY_HOST, PROXY_PORT), 100*time.Millisecond) { + opts = append(opts, httpkit.WithCharlesProxy(PROXY_HOST, PROXY_PORT)) + fmt.Println("Using Charles proxy.") + } else { + fmt.Println("Charles proxy not running, using direct connection.") + } + + client, err := httpkit.NewHttpClient(httpkit.NewNoopLogger(), opts...) 
+ if err != nil { + panic(err) + } + + sendGetRequestWithQueryParams(client) + sendGetRequestWithSetCookies(client) + sendPostRequestWithForm(client) + sendPostRequestWithJSON(client) + + harhandler.Export(client.GetFlowId(), "example.har") +} + +func sendGetRequestWithQueryParams(client httpkit.HttpClient) { + req, _ := http.NewRequest(http.MethodGet, URL+"/get?name=pierre&role=developer", nil) + req.Header.Add("Accept", "*/*") + req.Header.Add("User-Agent", "harkit-example") + req.Header.Add("Accept-Encoding", "gzip, deflate, br") + + req.Header.Add(http.HeaderOrderKey, "accept") + req.Header.Add(http.HeaderOrderKey, "user-agent") + req.Header.Add(http.HeaderOrderKey, "accept-encoding") + + sentAt := time.Now() + resp, err := client.Do(req) + if err != nil { + panic(err) + } + defer resp.Body.Close() + harhandler.AddEntry(client.GetFlowId(), client.GetProxy(), sentAt, req, resp) + + fmt.Println("Parameters sent.") +} + +func sendGetRequestWithSetCookies(client httpkit.HttpClient) { + req, _ := http.NewRequest(http.MethodGet, URL+"/cookies/set?name=pierre&role=developer", nil) + req.Header.Add("Accept", "*/*") + req.Header.Add("User-Agent", "harkit-example") + req.Header.Add("Accept-Encoding", "gzip, deflate, br") + + req.Header.Add(http.HeaderOrderKey, "accept") + req.Header.Add(http.HeaderOrderKey, "user-agent") + req.Header.Add(http.HeaderOrderKey, "accept-encoding") + + sentAt := time.Now() + resp, err := client.Do(req) + if err != nil { + panic(err) + } + defer resp.Body.Close() + harhandler.AddEntry(client.GetFlowId(), client.GetProxy(), sentAt, req, resp) + + fmt.Println("Cookies set.") +} + +func sendPostRequestWithForm(client httpkit.HttpClient) { + form := url.Values{} + form.Set("name", "Pierre") + form.Set("role", "developer") + body := strings.NewReader(form.Encode()) + + req, _ := http.NewRequest(http.MethodPost, URL+"/post", body) + req.Header.Add("Accept", "*/*") + req.Header.Add("Content-Type", "application/x-www-form-urlencoded") + req.Header.Add("User-Agent", "harkit-example") + req.Header.Add("Accept-Encoding", "gzip, deflate, br") + + req.AddCookie(&http.Cookie{Name: "example", Value: "cookie"}) + + req.Header.Add(http.HeaderOrderKey, "accept") + req.Header.Add(http.HeaderOrderKey, "content-length") + req.Header.Add(http.HeaderOrderKey, "content-type") + req.Header.Add(http.HeaderOrderKey, "cookie") + req.Header.Add(http.HeaderOrderKey, "user-agent") + req.Header.Add(http.HeaderOrderKey, "accept-encoding") + + sentAt := time.Now() + resp, err := client.Do(req) + if err != nil { + panic(err) + } + defer resp.Body.Close() + harhandler.AddEntry(client.GetFlowId(), client.GetProxy(), sentAt, req, resp) + + fmt.Println("Form URL-encoded request sent.") +} + +func sendPostRequestWithJSON(client httpkit.HttpClient) { + jsonBody := `{"name":"Pierre","role":"developer"}` + body := strings.NewReader(jsonBody) + + req, _ := http.NewRequest(http.MethodPost, URL+"/post", body) + req.Header.Add("Accept", "*/*") + req.Header.Add("Content-Type", "application/json") + req.Header.Add("User-Agent", "harkit-example") + req.Header.Add("Accept-Encoding", "gzip, deflate, br") + + req.AddCookie(&http.Cookie{Name: "example", Value: "cookie"}) + + req.Header.Add(http.HeaderOrderKey, "accept") + req.Header.Add(http.HeaderOrderKey, "content-length") + req.Header.Add(http.HeaderOrderKey, "content-type") + req.Header.Add(http.HeaderOrderKey, "cookie") + req.Header.Add(http.HeaderOrderKey, "user-agent") + req.Header.Add(http.HeaderOrderKey, "accept-encoding") + + sentAt := time.Now() + resp, err := 
client.Do(req) + if err != nil { + panic(err) + } + defer resp.Body.Close() + harhandler.AddEntry(client.GetFlowId(), client.GetProxy(), sentAt, req, resp) + + fmt.Println("JSON request sent.") +} + +// isProxyRunning checks if a proxy is running on the given address and port. +func isProxyRunning(address string, timeout time.Duration) bool { + conn, err := net.DialTimeout("tcp", address, timeout) + if err != nil { + return false + } + defer conn.Close() + return true +} diff --git a/go.mod b/go.mod index d384985..b975c3d 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,23 @@ module github.com/Mathious6/harkit go 1.24.2 + +require ( + github.com/bogdanfinn/fhttp v0.5.36 + github.com/stretchr/testify v1.10.0 +) + +require ( + github.com/andybalholm/brotli v1.1.1 // indirect + github.com/bogdanfinn/utls v1.6.5 // indirect + github.com/cloudflare/circl v1.5.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/klauspost/compress v1.17.11 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/quic-go/quic-go v0.48.1 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..4a53819 --- /dev/null +++ b/go.sum @@ -0,0 +1,34 @@ +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/bogdanfinn/fhttp v0.5.36 h1:t1sO/EkO4K40QD/Ti8f6t80leZIdh2AaeLfN7dMvjH8= +github.com/bogdanfinn/fhttp v0.5.36/go.mod h1:BlcawVfXJ4uhk5yyNGOOY2bwo8UmMi6ccMszP1KGLkU= +github.com/bogdanfinn/utls v1.6.5 h1:rVMQvhyN3zodLxKFWMRLt19INGBCZ/OM2/vBWPNIt1w= +github.com/bogdanfinn/utls v1.6.5/go.mod h1:czcHxHGsc1q9NjgWSeSinQZzn6MR76zUmGVIGanSXO0= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/quic-go/quic-go v0.48.1 h1:y/8xmfWI9qmGTc+lBr4jKRUWLGSlSigv847ULJ4hYXA= +github.com/quic-go/quic-go v0.48.1/go.mod h1:yBgs3rWBOADpga7F+jJsb6Ybg1LSYiQvwWlLX+/6HMs= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 h1:vr/HnozRka3pE4EsMEg1lgkXJkTFJCVUX+S/ZT6wYzM= +golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842/go.mod h1:XtvwrStGgqGPLc4cjQfWqZHG1YFdYs6swckp8vpsjnc= 
+golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/harfile/har.go b/harfile/har.go index 446a4a3..9d1ff99 100644 --- a/harfile/har.go +++ b/harfile/har.go @@ -1,12 +1,29 @@ // Package harfile provides types for working with HAR (HTTP Archive) 1.2 files. +// // See: http://www.softwareishard.com/blog/har-12-spec/ package harfile -import "time" +import ( + "encoding/json" + "os" + "time" +) + +const ( + HARVersion = "1.2" // HARVersion is the version of the HAR format + HARTimeLayout = "2006-01-02T15:04:05.000-07:00" // REF: "Mon Jan 2 15:04:05 MST 2006" +) + +// HARTime is a wrapper around time.Time that formats the time in the HAR format. +type HARTime time.Time + +func (ht HARTime) MarshalJSON() ([]byte, error) { + return []byte(`"` + time.Time(ht).Format(HARTimeLayout) + `"`), nil +} // HAR parent container for log. type HAR struct { - Log *Log `json:"log"` // + Log *Log `json:"log"` // Log represents the root of exported data. } // Log represents the root of exported data. @@ -35,34 +52,34 @@ type Browser struct { // Pages represents list of exported pages. type Page struct { - StartedDateTime time.Time `json:"startedDateTime"` // Date and time stamp for the beginning of the page load (ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.45+01:00). + StartedDateTime HARTime `json:"startedDateTime"` // Date and time stamp for the beginning of the page load (ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.45+01:00). ID string `json:"id"` // Unique identifier of a page within the [log]. Entries use it to refer the parent page. Title string `json:"title"` // Page title. PageTimings *PageTimings `json:"pageTimings"` // Detailed timing info about page load. Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } -// PageTimings describes timings for various events (states) fired during the -// page load. All times are specified in milliseconds. If a time info is not -// available appropriate field is set to -1. +// PageTimings describes timings for various events (states) fired during the page load. All times +// are specified in milliseconds. If a time info is not available appropriate field is set to -1. type PageTimings struct { OnContentLoad float64 `json:"onContentLoad,omitempty,omitzero"` // Content of the page loaded. Number of milliseconds since page load started (page.startedDateTime). Use -1 if the timing does not apply to the current request. OnLoad float64 `json:"onLoad,omitempty,omitzero"` // Page is loaded (onLoad event fired). Number of milliseconds since page load started (page.startedDateTime). Use -1 if the timing does not apply to the current request. 
Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } -// Entry represents an array with all exported HTTP requests. Sorting entries -// by startedDateTime (starting from the oldest) is preferred way how to export -// data since it can make importing faster. However the reader application -// should always make sure the array is sorted (if required for the import). +// Entry represents an array with all exported HTTP requests. Sorting entries by startedDateTime +// (starting from the oldest) is preferred way how to export data since it can make importing +// faster. However the reader application should always make sure the array is sorted (if required +// for the import). type Entry struct { Pageref string `json:"pageref,omitempty"` // Reference to the parent page. Leave out this field if the application does not support grouping by pages. - StartedDateTime time.Time `json:"startedDateTime"` // Date and time stamp of the request start (ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD). + StartedDateTime HARTime `json:"startedDateTime"` // Date and time stamp of the request start (ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD). Time float64 `json:"time"` // Total elapsed time of the request in milliseconds. This is the sum of all timings available in the timings object (i.e. not including -1 values) . Request *Request `json:"request"` // Detailed info about the request. Response *Response `json:"response"` // Detailed info about the response. Cache *Cache `json:"cache"` // Info about cache usage. Timings *Timings `json:"timings"` // Detailed timing info about request/response round trip. + ClientProxy string `json:"clientProxy,omitempty"` // NEW: Proxy used by the client to connect to the server. ServerIPAddress string `json:"serverIPAddress,omitempty"` // IP address of the server that was connected (result of DNS resolution). Connection string `json:"connection,omitempty"` // Unique ID of the parent TCP/IP connection, can be the client or server port number. Note that a port number doesn't have to be unique identifier in cases where the port is shared for more connections. If the port isn't available for the application, any other unique connection ID can be used instead (e.g. connection index). Leave out this field if the application doesn't support this info. Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. @@ -70,57 +87,57 @@ type Entry struct { // Request contains detailed info about performed request. type Request struct { - Method string `json:"method"` // Request method (GET, POST, ...). - URL string `json:"url"` // Absolute URL of the request (fragments are not included). - HTTPVersion string `json:"httpVersion"` // Request HTTP Version. - Cookies []*Cookie `json:"cookies"` // List of cookie objects. - Headers []*NameValuePair `json:"headers"` // List of header objects. - QueryString []*NameValuePair `json:"queryString"` // List of query parameter objects. - PostData *PostData `json:"postData,omitempty"` // Posted data info. - HeadersSize int64 `json:"headersSize"` // Total number of bytes from the start of the HTTP request message until (and including) the double CRLF before the body. Set to -1 if the info is not available. - BodySize int64 `json:"bodySize"` // Size of the request body (POST data payload) in bytes. Set to -1 if the info is not available. - Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. 
+ Method string `json:"method"` // Request method (GET, POST, ...). + URL string `json:"url"` // Absolute URL of the request (fragments are not included). + HTTPVersion string `json:"httpVersion"` // Request HTTP Version. + Cookies []*Cookie `json:"cookies"` // List of cookie objects. + Headers []*NVPair `json:"headers"` // List of header objects. + QueryString []*NVPair `json:"queryString"` // List of query parameter objects. + PostData *PostData `json:"postData,omitempty"` // Posted data info. + HeadersSize int64 `json:"headersSize"` // Total number of bytes from the start of the HTTP request message until (and including) the double CRLF before the body. Set to -1 if the info is not available. + BodySize int64 `json:"bodySize"` // Size of the request body (POST data payload) in bytes. Set to -1 if the info is not available. + Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } // Response contains detailed info about the response. type Response struct { - Status int64 `json:"status"` // Response status. - StatusText string `json:"statusText"` // Response status description. - HTTPVersion string `json:"httpVersion"` // Response HTTP Version. - Cookies []*Cookie `json:"cookies"` // List of cookie objects. - Headers []*NameValuePair `json:"headers"` // List of header objects. - Content *Content `json:"content"` // Details about the response body. - RedirectURL string `json:"redirectURL"` // Redirection target URL from the Location response header. - HeadersSize int64 `json:"headersSize"` // Total number of bytes from the start of the HTTP response message until (and including) the double CRLF before the body. Set to -1 if the info is not available. - BodySize int64 `json:"bodySize"` // Size of the received response body in bytes. Set to zero in case of responses coming from the cache (304). Set to -1 if the info is not available. - Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. -} - -// Cookie contains list of all cookies (used in [Request] and [Response] -// objects). + Status int64 `json:"status"` // Response status. + StatusText string `json:"statusText"` // Response status description. + HTTPVersion string `json:"httpVersion"` // Response HTTP Version. + Cookies []*Cookie `json:"cookies"` // List of cookie objects. + Headers []*NVPair `json:"headers"` // List of header objects. + Content *Content `json:"content"` // Details about the response body. + RedirectURL *string `json:"redirectURL"` // Redirection target URL from the Location response header. + HeadersSize int64 `json:"headersSize"` // Total number of bytes from the start of the HTTP response message until (and including) the double CRLF before the body. Set to -1 if the info is not available. + BodySize int64 `json:"bodySize"` // Size of the received response body in bytes. Set to zero in case of responses coming from the cache (304). Set to -1 if the info is not available. + Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. +} + +// Cookie contains list of all cookies (used in [Request] and [Response] objects). type Cookie struct { - Name string `json:"name"` // The name of the cookie. - Value string `json:"value"` // The cookie value. - Path string `json:"path,omitempty"` // The path pertaining to the cookie. - Domain string `json:"domain,omitempty"` // The host of the cookie. - Expires string `json:"expires,omitempty"` // Cookie expiration time. 
(ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.123+02:00). - HTTPOnly bool `json:"httpOnly"` // Set to true if the cookie is HTTP only, false otherwise. - Secure bool `json:"secure"` // True if the cookie was transmitted over ssl, false otherwise. - Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. -} - -// NameValuePair describes a name/value pair. -type NameValuePair struct { + Name string `json:"name"` // The name of the cookie. + Value string `json:"value"` // The cookie value. + Path string `json:"path,omitempty"` // The path pertaining to the cookie. + Domain string `json:"domain,omitempty"` // The host of the cookie. + Expires string `json:"expires,omitempty"` // Cookie expiration time. (ISO 8601 - YYYY-MM-DDThh:mm:ss.sTZD, e.g. 2009-07-24T19:20:30.123+02:00). + HTTPOnly bool `json:"httpOnly,omitempty"` // Set to true if the cookie is HTTP only, false otherwise. + Secure bool `json:"secure,omitempty"` // True if the cookie was transmitted over ssl, false otherwise. + Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. +} + +// NVPair describes a name/value pair. +type NVPair struct { Name string `json:"name"` // Name of the pair. Value string `json:"value"` // Value of the pair. Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } -// PostData describes posted data, if any (embedded in [Request] object). +// PostData describes posted data, if any (embedded in [Request] object). Text and params fields are +// mutually exclusive. type PostData struct { MimeType string `json:"mimeType"` // Mime type of posted data. - Params []*Param `json:"params"` // List of posted parameters (in case of URL encoded parameters). - Text string `json:"text"` // Plain text posted data + Params []*Param `json:"params,omitempty"` // List of posted parameters (in case of URL encoded parameters). + Text string `json:"text,omitempty"` // Plain text posted data Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } @@ -133,8 +150,7 @@ type Param struct { Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } -// Content describes details about response content (embedded in [Response] -// object). +// Content describes details about response content (embedded in [Response] object). type Content struct { Size int64 `json:"size"` // Length of the returned content in bytes. Should be equal to response.bodySize if there is no compression and bigger when the content has been compressed. Compression int64 `json:"compression,omitempty"` // Number of bytes saved. Leave out this field if the information is not available. @@ -160,8 +176,8 @@ type CacheData struct { Comment string `json:"comment,omitempty"` // A comment provided by the user or the application. } -// Timings describes various phases within request-response round trip. All -// times are specified in milliseconds. +// Timings describes various phases within request-response round trip. All times are specified in +// milliseconds. type Timings struct { Blocked float64 `json:"blocked,omitempty,omitzero"` // Time spent in a queue waiting for a network connection. Use -1 if the timing does not apply to the current request. DNS float64 `json:"dns,omitempty,omitzero"` // DNS resolution time. The time required to resolve a host name. Use -1 if the timing does not apply to the current request. 
@@ -172,3 +188,35 @@ type Timings struct {
 	Ssl     float64 `json:"ssl,omitempty,omitzero"` // Time required for SSL/TLS negotiation. If this field is defined then the time is also included in the connect field (to ensure backward compatibility with HAR 1.1). Use -1 if the timing does not apply to the current request.
 	Comment string  `json:"comment,omitempty"` // A comment provided by the user or the application.
 }
+
+// Total returns the total time of the request/response round trip, ignoring -1 values, which
+// indicate that the timing does not apply to the current request.
+func (t *Timings) Total() float64 {
+	sum := 0.0
+	for _, v := range []float64{t.Blocked, t.DNS, t.Connect, t.Send, t.Wait, t.Receive, t.Ssl} {
+		if v > 0 {
+			sum += v
+		}
+	}
+	return sum
+}
+
+// Save writes the HAR data to a file in JSON format under the specified filename.
+// It uses json.NewEncoder to encode without escaping HTML.
+func (h *HAR) Save(filename string) error {
+	file, err := os.Create(filename)
+	if err != nil {
+		return err
+	}
+	defer file.Close()
+
+	enc := json.NewEncoder(file)
+	enc.SetEscapeHTML(false)
+	enc.SetIndent("", " ")
+
+	if err := enc.Encode(h); err != nil {
+		return err
+	}
+
+	return nil
+}
diff --git a/harhandler/har_handler.go b/harhandler/har_handler.go
new file mode 100644
index 0000000..b40a17b
--- /dev/null
+++ b/harhandler/har_handler.go
@@ -0,0 +1,148 @@
+package harhandler
+
+import (
+	"context"
+	"fmt"
+	"net"
+	"net/url"
+	"sync"
+	"time"
+
+	"github.com/Mathious6/harkit"
+	"github.com/Mathious6/harkit/converter"
+	"github.com/Mathious6/harkit/harfile"
+	http "github.com/bogdanfinn/fhttp"
+)
+
+var (
+	globalHarStorage      = make(map[string]*HARHandler) // globalHarStorage stores the HARHandler of every flow, keyed by flow ID
+	globalHarStorageMutex = sync.Mutex{}                 // globalHarStorageMutex synchronizes access to the globalHarStorage map
+)
+
+// HARHandler is the main struct that stores the HAR data for a flow
+type HARHandler struct {
+	har              *harfile.HAR // har is the HAR data for the flow
+	resolveIPAddress bool         // resolveIPAddress is a flag to resolve the IP address of the server
+}
+
+// CreateHandler creates a new HARHandler for a flow with the given flowID
+func CreateHandler(flowID string, opts ...HandlerOption) (*HARHandler, error) {
+	globalHarStorageMutex.Lock()
+	defer globalHarStorageMutex.Unlock()
+	if _, exists := globalHarStorage[flowID]; exists {
+		return nil, fmt.Errorf("handler %q already exists", flowID)
+	}
+	handler := newHARHandler(flowID, opts...)
+	globalHarStorage[flowID] = handler
+	return handler, nil
+}
+
+// GetHandler gets the HARHandler for a flow with the given flowID
+func GetHandler(flowID string) (*HARHandler, error) {
+	globalHarStorageMutex.Lock()
+	defer globalHarStorageMutex.Unlock()
+	handler, exists := globalHarStorage[flowID]
+	if !exists {
+		return nil, fmt.Errorf("handler %q not found", flowID)
+	}
+	return handler, nil
+}
+
+// GetOrCreateHandler returns the HARHandler for the given flowID, creating it if it doesn't exist; the options are applied either way
+func GetOrCreateHandler(flowID string, opts ...HandlerOption) *HARHandler {
+	if handler, err := GetHandler(flowID); err == nil {
+		for _, opt := range opts {
+			opt(handler)
+		}
+		return handler
+	}
+	handler, _ := CreateHandler(flowID, opts...)
+	return handler
+}
+
+// newHARHandler creates a new HARHandler for a flow with the given flowID and applies the given options
+func newHARHandler(flowID string, opts ...HandlerOption) *HARHandler {
+	h := &HARHandler{
+		har: &harfile.HAR{
+			Log: &harfile.Log{
+				Version: harfile.HARVersion,
+				Creator: &harfile.Creator{
+					Name:    flowID,
+					Version: fmt.Sprintf("harkit-%s", harkit.Version),
+				},
+				Entries: []*harfile.Entry{},
+			},
+		},
+	}
+	for _, opt := range opts {
+		opt(h)
+	}
+	return h
+}
+
+// AddEntry adds a new entry to the HARHandler of the given flowID, built from proxy, sentAt, request, and response
+func AddEntry(flowID string, proxy string, sentAt time.Time, req *http.Request, resp *http.Response) error {
+	return GetOrCreateHandler(flowID).AddEntry(proxy, sentAt, req, resp)
+}
+
+// Export writes the HAR data of the given flowID to filename and removes the handler from the global storage
+func Export(flowID, filename string) error {
+	handler := GetOrCreateHandler(flowID)
+	globalHarStorageMutex.Lock()
+	delete(globalHarStorage, flowID)
+	globalHarStorageMutex.Unlock()
+	return handler.har.Save(filename)
+}
+
+// AddEntry adds a new entry to the HARHandler, built from the given proxy, sentAt, request, and response
+func (h *HARHandler) AddEntry(proxy string, sentAt time.Time, req *http.Request, resp *http.Response) error {
+	timingsReceive := float64(time.Since(sentAt).Milliseconds())
+
+	harReq, err := converter.FromHTTPRequest(req)
+	if err != nil {
+		return err
+	}
+
+	harResp, err := converter.FromHTTPResponse(resp)
+	if err != nil {
+		return err
+	}
+
+	timings := &harfile.Timings{
+		Send:    -1,
+		Wait:    float64(time.Since(sentAt).Milliseconds()) - timingsReceive,
+		Receive: timingsReceive,
+	}
+
+	h.har.Log.Entries = append(h.har.Log.Entries, &harfile.Entry{
+		StartedDateTime: harfile.HARTime(sentAt),
+		Time:            timings.Total(),
+		Request:         harReq,
+		Response:        harResp,
+		Cache:           nil,
+		Timings:         timings,
+		ClientProxy:     proxy,
+		ServerIPAddress: resolveServerIPAddress(h.resolveIPAddress, harReq.URL),
+	})
+
+	return nil
+}
+
+// resolveServerIPAddress performs a DNS lookup on the given URL and returns the first resolved IP
+// address, "0.0.0.0" when resolution is disabled, or an empty string on failure. This is a blocking operation.
+func resolveServerIPAddress(resolve bool, rawURL string) string {
+	if !resolve {
+		return "0.0.0.0"
+	}
+
+	parsedURL, err := url.Parse(rawURL)
+	if err != nil {
+		return ""
+	}
+
+	ipAddrs, err := net.DefaultResolver.LookupIPAddr(context.Background(), parsedURL.Hostname())
+	if err != nil || len(ipAddrs) == 0 {
+		return ""
+	}
+	return ipAddrs[0].IP.String()
+}
diff --git a/harhandler/har_handler_options.go b/harhandler/har_handler_options.go
new file mode 100644
index 0000000..e8f9c64
--- /dev/null
+++ b/harhandler/har_handler_options.go
@@ -0,0 +1,11 @@
+package harhandler
+
+// HandlerOption is a function that configures a HARHandler
+type HandlerOption func(*HARHandler)
+
+// WithServerIPAddress configures a HARHandler to resolve the server IP address of each added entry
+func WithServerIPAddress() HandlerOption {
+	return func(h *HARHandler) {
+		h.resolveIPAddress = true
+	}
+}
diff --git a/version.go b/version.go
new file mode 100644
index 0000000..9d95010
--- /dev/null
+++ b/version.go
@@ -0,0 +1,3 @@
+package harkit
+
+var Version = "v1.0.0"
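For context, here is a minimal usage sketch of the harhandler API added in this patch. It is not part of the patch itself: the flow ID "example-flow", the target URL, and the output filename are illustrative, and it assumes that github.com/bogdanfinn/fhttp exposes the same client API as net/http and that the converter package (not shown here) accepts the request/response pair as built below.

package main

import (
	"log"
	"time"

	"github.com/Mathious6/harkit/harhandler"
	http "github.com/bogdanfinn/fhttp"
)

func main() {
	// One handler per logical flow; WithServerIPAddress resolves the serverIPAddress field of each entry.
	handler, err := harhandler.CreateHandler("example-flow", harhandler.WithServerIPAddress())
	if err != nil {
		log.Fatal(err)
	}

	req, err := http.NewRequest(http.MethodGet, "https://example.com", nil)
	if err != nil {
		log.Fatal(err)
	}

	sentAt := time.Now()
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Record the round trip; the proxy string is empty because no client proxy is used here.
	if err := handler.AddEntry("", sentAt, req, resp); err != nil {
		log.Fatal(err)
	}

	// Write the HAR file and drop the handler from the global storage.
	if err := harhandler.Export("example-flow", "example-flow.har"); err != nil {
		log.Fatal(err)
	}
}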