Reuse the same HTTP client for all requests. #130

Open · wants to merge 1 commit into base: master
182 changes: 95 additions & 87 deletions client/client.go
@@ -102,11 +102,43 @@ type Repo struct {
// RepoMap describes each repo's packages as seen from a client.
type RepoMap map[string]Repo

// Downloader is a wrapper around http.Client
type Downloader struct {
HTTPClient *http.Client
UsingProxyServer bool
}

// NewDownloader returns a Downloader optionally using a specified proxyServer.
func NewDownloader(proxyServer string) (*Downloader, error) {
httpClient := http.DefaultClient
proxy := http.ProxyFromEnvironment
if proxyServer != "" {
proxyURL, err := url.Parse(proxyServer)
if err != nil {
return nil, err
}
proxy = http.ProxyURL(proxyURL)
}
httpClient.Transport = &http.Transport{
Proxy: proxy,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
}).DialContext,
ForceAttemptHTTP2: true,
MaxIdleConns: 100,
IdleConnTimeout: 60 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
}
return &Downloader{HTTPClient: httpClient, UsingProxyServer: proxyServer != ""}, nil
}

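The point of this change: the http.Transport above is built once in NewDownloader, and every later request goes through the same client, so its idle-connection pool is actually reused. One caveat: httpClient := http.DefaultClient copies a pointer, so assigning Transport also rewires the package-level http.DefaultClient for anything else in the process; constructing a fresh &http.Client{} would avoid that side effect. A rough sketch of the intended call pattern (ctx, sources, cacheDir, cacheLife and the error handling are illustrative, not taken from this diff):

// Build the shared Downloader once, near program start.
d, err := client.NewDownloader(proxyServer) // proxyServer may be "" to fall back to the environment proxy
if err != nil {
	log.Fatalf("NewDownloader: %v", err)
}
// Every repo index fetch then reuses the same underlying client and its connection pool.
rm := d.AvailableVersions(ctx, sources, cacheDir, cacheLife)
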
// AvailableVersions builds a RepoMap from a list of sources.
func AvailableVersions(ctx context.Context, srcs map[string]priority.Value, cacheDir string, cacheLife time.Duration, proxyServer string) RepoMap {
func (d *Downloader) AvailableVersions(ctx context.Context, srcs map[string]priority.Value, cacheDir string, cacheLife time.Duration) RepoMap {
rm := make(RepoMap)
for r, pri := range srcs {
rf, err := unmarshalRepoPackages(ctx, r, cacheDir, cacheLife, proxyServer)
rf, err := d.unmarshalRepoPackages(ctx, r, cacheDir, cacheLife)
if err != nil {
logger.Errorf("error reading repo %q: %v", r, err)
continue
@@ -119,56 +151,10 @@ func AvailableVersions(ctx context.Context, srcs map[string]priority.Value, cach
return rm
}

func decode(index io.ReadCloser, ct, url, cf string) ([]goolib.RepoSpec, error) {
defer index.Close()

var dec *json.Decoder
switch ct {
case "application/x-gzip":
gr, err := gzip.NewReader(index)
if err != nil {
return nil, err
}
dec = json.NewDecoder(gr)
case "application/json":
dec = json.NewDecoder(index)
default:
return nil, fmt.Errorf("unsupported content type: %s", ct)
}

var m []goolib.RepoSpec
for dec.More() {
if err := dec.Decode(&m); err != nil {
return nil, err
}
}

f, err := oswrap.Create(cf)
if err != nil {
return nil, err
}
j, err := json.Marshal(m)
if err != nil {
return nil, err
}
if _, err := f.Write(j); err != nil {
return nil, err
}

// The .url files aren't used by googet but help developers and the
// curious figure out which file belongs to which repo/URL.
mf := fmt.Sprintf("%s.url", strings.TrimSuffix(cf, filepath.Ext(cf)))
if err = ioutil.WriteFile(mf, []byte(url), 0644); err != nil {
logger.Errorf("Failed to write '%s': %v", mf, err)
}

return m, f.Close()
}

// unmarshalRepoPackages gets and unmarshals a repository URL or uses the cached contents
// if mtime is less than cacheLife.
// Successfully unmarshalled contents will be written to a cache.
func unmarshalRepoPackages(ctx context.Context, p, cacheDir string, cacheLife time.Duration, proxyServer string) ([]goolib.RepoSpec, error) {
func (d *Downloader) unmarshalRepoPackages(ctx context.Context, p, cacheDir string, cacheLife time.Duration) ([]goolib.RepoSpec, error) {
pName := strings.TrimPrefix(p, "oauth-")

cf := filepath.Join(cacheDir, fmt.Sprintf("%x.rs", sha256.Sum256([]byte(pName))))
@@ -196,34 +182,13 @@ func unmarshalRepoPackages(ctx context.Context, p, cacheDir string, cacheLife ti

isGCSURL, bucket, object := goolib.SplitGCSUrl(pName)
if isGCSURL {
return unmarshalRepoPackagesGCS(ctx, bucket, object, pName, cf, proxyServer)
return d.unmarshalRepoPackagesGCS(ctx, bucket, object, pName, cf)
}
return unmarshalRepoPackagesHTTP(ctx, p, cf, proxyServer)
return d.unmarshalRepoPackagesHTTP(ctx, p, cf)
}

// Get gets a url using an optional proxy server, retrying once on any error.
func Get(ctx context.Context, path, proxyServer string) (*http.Response, error) {
httpClient := http.DefaultClient
proxy := http.ProxyFromEnvironment
if proxyServer != "" {
proxyURL, err := url.Parse(proxyServer)
if err != nil {
return nil, err
}
proxy = http.ProxyURL(proxyURL)
}
httpClient.Transport = &http.Transport{
Proxy: proxy,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
}).DialContext,
ForceAttemptHTTP2: true,
MaxIdleConns: 100,
IdleConnTimeout: 60 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
}
func (d *Downloader) Get(ctx context.Context, path string) (*http.Response, error) {
useOauth := strings.HasPrefix(path, "oauth-")
path = strings.TrimPrefix(path, "oauth-")
req, err := http.NewRequest(http.MethodGet, path, nil)
@@ -241,46 +206,43 @@ func Get(ctx context.Context, path, proxyServer string) (*http.Response, error)
}
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
}
resp, err := httpClient.Do(req)
resp, err := d.HTTPClient.Do(req)
// We retry on any error once as this mitigates some
// connection issues in certain situations.
if err == nil {
return resp, nil
}
return httpClient.Do(req)
return d.HTTPClient.Do(req)
}

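Get retries the request exactly once on a transport-level error; that is safe here because the request is a bodyless GET and can be replayed. Callers are still expected to check the status code and close the body themselves, roughly like this (the URL is a placeholder):

resp, err := d.Get(ctx, "https://repo.example.com/index.gz")
if err != nil {
	return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
	return nil, fmt.Errorf("index GET request returned status: %q", resp.Status)
}
// ... decode resp.Body ...
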
func unmarshalRepoPackagesHTTP(ctx context.Context, repoURL string, cf string, proxyServer string) ([]goolib.RepoSpec, error) {
func (d *Downloader) unmarshalRepoPackagesHTTP(ctx context.Context, repoURL string, cf string) ([]goolib.RepoSpec, error) {
indexURL := repoURL + "/index.gz"
trimmedIndexURL := strings.TrimPrefix(indexURL, "oauth-")
ct := "application/x-gzip"
logger.Infof("Fetching %q", trimmedIndexURL)
res, err := Get(ctx, indexURL, proxyServer)
res, err := d.Get(ctx, indexURL)
if err != nil {
return nil, err
}

if err != nil || res.StatusCode != 200 {
//logger.Infof("Gzipped index returned status: %q, trying plain JSON.", res.Status)
if res.StatusCode != http.StatusOK {
indexURL = repoURL + "/index"
trimmedIndexURL = strings.TrimPrefix(indexURL, "oauth-")
ct = "application/json"
logger.Infof("Fetching %q", trimmedIndexURL)
res, err = Get(ctx, indexURL, proxyServer)
res, err = d.Get(ctx, indexURL)
if err != nil {
return nil, err
}

if res.StatusCode != 200 {
if res.StatusCode != http.StatusOK {
return nil, fmt.Errorf("index GET request returned status: %q", res.Status)
}
}

return decode(res.Body, ct, repoURL, cf)
}

func unmarshalRepoPackagesGCS(ctx context.Context, bucket, object, url, cf string, proxyServer string) ([]goolib.RepoSpec, error) {
if proxyServer != "" {
logger.Errorf("Proxy server not supported with gs:// URLs, skiping repo 'gs://%s/%s'", bucket, object)
func (d *Downloader) unmarshalRepoPackagesGCS(ctx context.Context, bucket, object, url, cf string) ([]goolib.RepoSpec, error) {
if d.UsingProxyServer {
logger.Errorf("Proxy server not supported with gs:// URLs, skipping repo 'gs://%s/%s'", bucket, object)
var empty []goolib.RepoSpec
return empty, nil
}
@@ -315,6 +277,52 @@ func unmarshalRepoPackagesGCS(ctx context.Context, bucket, object, url, cf strin
return decode(r, "application/json", url, cf)
}

func decode(index io.ReadCloser, ct, url, cf string) ([]goolib.RepoSpec, error) {
defer index.Close()

var dec *json.Decoder
switch ct {
case "application/x-gzip":
gr, err := gzip.NewReader(index)
if err != nil {
return nil, err
}
dec = json.NewDecoder(gr)
case "application/json":
dec = json.NewDecoder(index)
default:
return nil, fmt.Errorf("unsupported content type: %s", ct)
}

var m []goolib.RepoSpec
for dec.More() {
if err := dec.Decode(&m); err != nil {
return nil, err
}
}

f, err := oswrap.Create(cf)
if err != nil {
return nil, err
}
j, err := json.Marshal(m)
if err != nil {
return nil, err
}
if _, err := f.Write(j); err != nil {
return nil, err
}

// The .url files aren't used by googet but help developers and the
// curious figure out which file belongs to which repo/URL.
mf := fmt.Sprintf("%s.url", strings.TrimSuffix(cf, filepath.Ext(cf)))
if err = ioutil.WriteFile(mf, []byte(url), 0644); err != nil {
logger.Errorf("Failed to write '%s': %v", mf, err)
}

return m, f.Close()
}

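For context on the cache handling in decode (moved below the fetch helpers by this PR but otherwise unchanged): unmarshalRepoPackages derives the cache file name from the repo URL, and decode writes both the re-marshalled index and a .url sidecar next to it. A small sketch of that naming, assuming a repo URL of https://example.com/repo:

pName := "https://example.com/repo" // any "oauth-" prefix is trimmed before hashing
cf := filepath.Join(cacheDir, fmt.Sprintf("%x.rs", sha256.Sum256([]byte(pName))))
urlFile := fmt.Sprintf("%s.url", strings.TrimSuffix(cf, filepath.Ext(cf)))
fmt.Println(cf, urlFile) // one .rs/.url pair per repo URL in cacheDir
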
// FindRepoSpec returns the RepoSpec in repo whose PackageSpec matches pi.
func FindRepoSpec(pi goolib.PackageInfo, repo Repo) (goolib.RepoSpec, error) {
for _, p := range repo.Packages {
18 changes: 15 additions & 3 deletions client/client_test.go
@@ -268,7 +268,11 @@ func TestUnmarshalRepoPackagesJSON(t *testing.T) {
}))
defer ts.Close()

got, err := unmarshalRepoPackages(context.Background(), ts.URL, tempDir, cacheLife, proxyServer)
d, err := NewDownloader(proxyServer)
if err != nil {
t.Fatalf("NewDownloader(%s): %v", proxyServer, err)
}
got, err := d.unmarshalRepoPackages(context.Background(), ts.URL, tempDir, cacheLife)
if err != nil {
t.Fatalf("Error running unmarshalRepoPackages: %v", err)
}
@@ -313,7 +317,11 @@ func TestUnmarshalRepoPackagesGzip(t *testing.T) {
}))
defer ts.Close()

got, err := unmarshalRepoPackages(context.Background(), ts.URL, tempDir, cacheLife, proxyServer)
d, err := NewDownloader(proxyServer)
if err != nil {
t.Fatalf("NewDownloader(%s): %v", proxyServer, err)
}
got, err := d.unmarshalRepoPackages(context.Background(), ts.URL, tempDir, cacheLife)
if err != nil {
t.Fatalf("Error running unmarshalRepoPackages: %v", err)
}
@@ -351,7 +359,11 @@ func TestUnmarshalRepoPackagesCache(t *testing.T) {
}

// No http server as this should use the cached content.
got, err := unmarshalRepoPackages(context.Background(), url, tempDir, cacheLife, proxyServer)
d, err := NewDownloader(proxyServer)
if err != nil {
t.Fatalf("NewDownloader(%s): %v", proxyServer, err)
}
got, err := d.unmarshalRepoPackages(context.Background(), url, tempDir, cacheLife)
if err != nil {
t.Fatalf("Error running unmarshalRepoPackages: %v", err)
}
35 changes: 14 additions & 21 deletions download/download.go
@@ -23,6 +23,7 @@ import (
"errors"
"fmt"
"io"
"net/http"
"net/url"
"os"
"path"
@@ -37,46 +38,38 @@ import (
"github.com/google/logger"
)

const (
httpOK = 200
)

// Package downloads a package from the given url,
// the provided SHA256 checksum will be checked during download.
func Package(ctx context.Context, pkgURL, dst, chksum, proxyServer string) error {
func Package(ctx context.Context, pkgURL, dst, chksum string, downloader *client.Downloader) error {
if err := oswrap.RemoveAll(dst); err != nil {
return err
}

isGCSURL, bucket, object := goolib.SplitGCSUrl(pkgURL)
if isGCSURL {
return packageGCS(ctx, bucket, object, dst, chksum, "")
return packageGCS(ctx, bucket, object, dst, chksum)
}

return packageHTTP(ctx, pkgURL, dst, chksum, proxyServer)
return packageHTTP(ctx, pkgURL, dst, chksum, downloader)
}

// Downloads a package from an HTTP(s) server
func packageHTTP(ctx context.Context, pkgURL, dst, chksum string, proxyServer string) error {
resp, err := client.Get(ctx, pkgURL, proxyServer)
func packageHTTP(ctx context.Context, pkgURL, dst, chksum string, downloader *client.Downloader) error {
resp, err := downloader.Get(ctx, pkgURL)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != httpOK {
return fmt.Errorf("Invalid return code from server, got: %d, want: %d", resp.StatusCode, httpOK)
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("invalid return code from server, got: %d, want: %d", resp.StatusCode, http.StatusOK)
}

logger.Infof("Downloading %q", pkgURL)
return download(resp.Body, dst, chksum)
}

// Downloads a package from Google Cloud Storage
func packageGCS(ctx context.Context, bucket, object string, dst, chksum string, proxyServer string) error {
if proxyServer != "" {
return fmt.Errorf("Proxy server not supported with GCS URLs")
}

func packageGCS(ctx context.Context, bucket, object string, dst, chksum string) error {
client, err := storage.NewClient(ctx)
if err != nil {
return err
@@ -94,7 +87,7 @@ func packageGCS(ctx context.Context, bucket, object string, dst, chksum string,
}

// FromRepo downloads a package from a repo.
func FromRepo(ctx context.Context, rs goolib.RepoSpec, repo, dir string, proxyServer string) (string, error) {
func FromRepo(ctx context.Context, rs goolib.RepoSpec, repo, dir string, downloader *client.Downloader) (string, error) {
repoURL, err := url.Parse(repo)
if err != nil {
return "", err
@@ -114,11 +107,11 @@ func FromRepo(ctx context.Context, rs goolib.RepoSpec, repo, dir string, proxySe

pn := goolib.PackageInfo{Name: rs.PackageSpec.Name, Arch: rs.PackageSpec.Arch, Ver: rs.PackageSpec.Version}.PkgName()
dst := filepath.Join(dir, filepath.Base(pn))
return dst, Package(ctx, pkgURL.String(), dst, rs.Checksum, proxyServer)
return dst, Package(ctx, pkgURL.String(), dst, rs.Checksum, downloader)
}

// Latest downloads the latest available version of a package.
func Latest(ctx context.Context, name, dir string, rm client.RepoMap, archs []string, proxyServer string) (string, error) {
func Latest(ctx context.Context, name, dir string, rm client.RepoMap, archs []string, downloader *client.Downloader) (string, error) {
ver, repo, arch, err := client.FindRepoLatest(goolib.PackageInfo{Name: name, Arch: "", Ver: ""}, rm, archs)
if err != nil {
return "", err
@@ -127,7 +120,7 @@ func Latest(ctx context.Context, name, dir string, rm client.RepoMap, archs []st
if err != nil {
return "", err
}
return FromRepo(ctx, rs, repo, dir, proxyServer)
return FromRepo(ctx, rs, repo, dir, downloader)
}

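With the client changes above, the download package no longer takes proxyServer strings; callers thread the one Downloader through instead. A rough end-to-end sketch, reusing the Downloader d returned by client.NewDownloader (the package name, directories and archs are illustrative):

// rm comes from d.AvailableVersions, as shown in client/client.go.
pkgPath, err := download.Latest(ctx, "my-package", tmpDir, rm, archs, d)
if err != nil {
	return err
}
logger.Infof("Downloaded %q to %q", "my-package", pkgPath)
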
func download(r io.Reader, dst, chksum string) (err error) {
@@ -159,7 +152,7 @@ func download(r io.Reader, dst, chksum string) (err error) {
}

// ExtractPkg takes a path to a package and extracts it to a directory based on the
// package name, it returns the path to the extraced directory.
// package name, it returns the path to the extracted directory.
func ExtractPkg(src string) (dst string, err error) {
dst = strings.TrimSuffix(src, filepath.Ext(src))
if src == "" || dst == "" {