From d284aa1a68d0e1d660b2151843167c5acba4b257 Mon Sep 17 00:00:00 2001 From: Jian Xiao Date: Tue, 18 Feb 2025 23:38:24 +0000 Subject: [PATCH] one api --- disperser/dataapi/docs/v2/V2_docs.go | 76 +------- disperser/dataapi/docs/v2/V2_swagger.json | 76 +------- disperser/dataapi/docs/v2/V2_swagger.yaml | 61 ++---- disperser/dataapi/v2/blobs.go | 224 +++++++++------------- disperser/dataapi/v2/server_v2.go | 3 +- disperser/dataapi/v2/server_v2_test.go | 29 +-- 6 files changed, 136 insertions(+), 333 deletions(-) diff --git a/disperser/dataapi/docs/v2/V2_docs.go b/disperser/dataapi/docs/v2/V2_docs.go index df24a47da1..bb44e5bcba 100644 --- a/disperser/dataapi/docs/v2/V2_docs.go +++ b/disperser/dataapi/docs/v2/V2_docs.go @@ -118,7 +118,7 @@ const docTemplateV2 = `{ } } }, - "/blobs/feed/backward": { + "/blobs/feed": { "get": { "produces": [ "application/json" @@ -126,86 +126,30 @@ const docTemplateV2 = `{ "tags": [ "Blobs" ], - "summary": "Fetch blob feed backward in time (newest to oldest)", + "summary": "Fetch blob feed in specified direction", "parameters": [ { "type": "string", - "description": "Fetch blobs before this time (ISO 8601 format) [default: now]", - "name": "before", - "in": "query" - }, - { - "type": "string", - "description": "Stop fetching at this time (ISO 8601 format) [default: now-1h]", - "name": "until", - "in": "query" + "description": "Direction to fetch: 'forward' or 'backward' [default: forward]", + "name": "direction", + "in": "query", + "required": true }, { "type": "string", - "description": "Pagination cursor for fetching older items; override before [default: empty]", - "name": "cursor", - "in": "query" - }, - { - "type": "integer", - "description": "Maximum number of blobs to fetch [default: 20; max: 1000]", - "name": "limit", + "description": "Fetch blobs before this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z) [default: now]", + "name": "before", "in": "query" - } - ], - "responses": { - "200": { - "description": 
"OK", - "schema": { - "$ref": "#/definitions/v2.BlobFeedResponse" - } - }, - "400": { - "description": "error: Bad request", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } - }, - "404": { - "description": "error: Not found", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } }, - "500": { - "description": "error: Server error", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } - } - } - } - }, - "/blobs/feed/forward": { - "get": { - "produces": [ - "application/json" - ], - "tags": [ - "Blobs" - ], - "summary": "Fetch blob feed forward in time (oldest to newest)", - "parameters": [ { "type": "string", - "description": "Fetch blobs after this time (ISO 8601 format) [default: until - 1h]", + "description": "Fetch blobs after this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z); must be smaller than 'before' [default: before-1h]", "name": "after", "in": "query" }, { "type": "string", - "description": "Stop fetching at this time (ISO 8601 format) [default: now]", - "name": "until", - "in": "query" - }, - { - "type": "string", - "description": "Pagination cursor for fetching newer items; override after [default: empty]", + "description": "Pagination cursor; for 'forward', fetches blobs from 'cursor' to 'before', for 'backward', fetches from 'after' to 'cursor' (all are exclusive) [default: empty]", "name": "cursor", "in": "query" }, diff --git a/disperser/dataapi/docs/v2/V2_swagger.json b/disperser/dataapi/docs/v2/V2_swagger.json index 50d34649d6..85b965429a 100644 --- a/disperser/dataapi/docs/v2/V2_swagger.json +++ b/disperser/dataapi/docs/v2/V2_swagger.json @@ -115,7 +115,7 @@ } } }, - "/blobs/feed/backward": { + "/blobs/feed": { "get": { "produces": [ "application/json" @@ -123,86 +123,30 @@ "tags": [ "Blobs" ], - "summary": "Fetch blob feed backward in time (newest to oldest)", + "summary": "Fetch blob feed in specified direction", "parameters": [ { "type": "string", - "description": "Fetch blobs before this time 
(ISO 8601 format) [default: now]", - "name": "before", - "in": "query" - }, - { - "type": "string", - "description": "Stop fetching at this time (ISO 8601 format) [default: now-1h]", - "name": "until", - "in": "query" + "description": "Direction to fetch: 'forward' or 'backward' [default: forward]", + "name": "direction", + "in": "query", + "required": true }, { "type": "string", - "description": "Pagination cursor for fetching older items; override before [default: empty]", - "name": "cursor", - "in": "query" - }, - { - "type": "integer", - "description": "Maximum number of blobs to fetch [default: 20; max: 1000]", - "name": "limit", + "description": "Fetch blobs before this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z) [default: now]", + "name": "before", "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/v2.BlobFeedResponse" - } - }, - "400": { - "description": "error: Bad request", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } - }, - "404": { - "description": "error: Not found", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } }, - "500": { - "description": "error: Server error", - "schema": { - "$ref": "#/definitions/v2.ErrorResponse" - } - } - } - } - }, - "/blobs/feed/forward": { - "get": { - "produces": [ - "application/json" - ], - "tags": [ - "Blobs" - ], - "summary": "Fetch blob feed forward in time (oldest to newest)", - "parameters": [ { "type": "string", - "description": "Fetch blobs after this time (ISO 8601 format) [default: until - 1h]", + "description": "Fetch blobs after this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z); must be smaller than 'before' [default: before-1h]", "name": "after", "in": "query" }, { "type": "string", - "description": "Stop fetching at this time (ISO 8601 format) [default: now]", - "name": "until", - "in": "query" - }, - { - "type": "string", - "description": "Pagination cursor for fetching newer 
items; override after [default: empty]", + "description": "Pagination cursor; for 'forward', fetches blobs from 'cursor' to 'before', for 'backward', fetches from 'after' to 'cursor' (all are exclusive) [default: empty]", "name": "cursor", "in": "query" }, diff --git a/disperser/dataapi/docs/v2/V2_swagger.yaml b/disperser/dataapi/docs/v2/V2_swagger.yaml index f3aba85765..4dcd916403 100644 --- a/disperser/dataapi/docs/v2/V2_swagger.yaml +++ b/disperser/dataapi/docs/v2/V2_swagger.yaml @@ -672,62 +672,27 @@ paths: summary: Fetch blob certificate by blob key v2 tags: - Blobs - /blobs/feed/backward: + /blobs/feed: get: parameters: - - description: 'Fetch blobs before this time (ISO 8601 format) [default: now]' + - description: 'Direction to fetch: ''forward'' or ''backward'' [default: forward]' in: query - name: before - type: string - - description: 'Stop fetching at this time (ISO 8601 format) [default: now-1h]' - in: query - name: until + name: direction + required: true type: string - - description: 'Pagination cursor for fetching older items; override before - [default: empty]' + - description: 'Fetch blobs before this time, exclusive (ISO 8601 format, example: + 2006-01-02T15:04:05Z) [default: now]' in: query - name: cursor + name: before type: string - - description: 'Maximum number of blobs to fetch [default: 20; max: 1000]' - in: query - name: limit - type: integer - produces: - - application/json - responses: - "200": - description: OK - schema: - $ref: '#/definitions/v2.BlobFeedResponse' - "400": - description: 'error: Bad request' - schema: - $ref: '#/definitions/v2.ErrorResponse' - "404": - description: 'error: Not found' - schema: - $ref: '#/definitions/v2.ErrorResponse' - "500": - description: 'error: Server error' - schema: - $ref: '#/definitions/v2.ErrorResponse' - summary: Fetch blob feed backward in time (newest to oldest) - tags: - - Blobs - /blobs/feed/forward: - get: - parameters: - - description: 'Fetch blobs after this time (ISO 8601 format) 
[default: until - - 1h]' + - description: 'Fetch blobs after this time, exclusive (ISO 8601 format, example: + 2006-01-02T15:04:05Z); must be smaller than ''before'' [default: before-1h]' in: query name: after type: string - - description: 'Stop fetching at this time (ISO 8601 format) [default: now]' - in: query - name: until - type: string - - description: 'Pagination cursor for fetching newer items; override after [default: - empty]' + - description: 'Pagination cursor; for ''forward'', fetches blobs from ''cursor'' + to ''before'', for ''backward'', fetches from ''after'' to ''cursor'' (all + are exclusive) [default: empty]' in: query name: cursor type: string @@ -754,7 +719,7 @@ paths: description: 'error: Server error' schema: $ref: '#/definitions/v2.ErrorResponse' - summary: Fetch blob feed forward in time (oldest to newest) + summary: Fetch blob feed in specified direction tags: - Blobs /metrics/summary: diff --git a/disperser/dataapi/v2/blobs.go b/disperser/dataapi/v2/blobs.go index 87caf3ba7e..5952b6b786 100644 --- a/disperser/dataapi/v2/blobs.go +++ b/disperser/dataapi/v2/blobs.go @@ -3,6 +3,7 @@ package v2 import ( "context" "encoding/hex" + "errors" "fmt" "net/http" "strconv" @@ -16,140 +17,87 @@ import ( "github.com/gin-gonic/gin" ) -// FetchBlobFeedForward godoc +// FetchBlobFeed godoc // -// @Summary Fetch blob feed forward in time (oldest to newest) +// @Summary Fetch blob feed in specified direction // @Tags Blobs // @Produce json -// @Param after query string false "Fetch blobs after this time (ISO 8601 format: 2006-01-02T15:04:05Z) [default: until - 1h]" -// @Param until query string false "Stop fetching at this time (ISO 8601 format: 2006-01-02T15:04:05Z) [default: now]" -// @Param cursor query string false "Pagination cursor for fetching newer items; override after param [default: empty]" -// @Param limit query int false "Maximum number of blobs to fetch [default: 20; max: 1000]" -// @Success 200 {object} BlobFeedResponse -// @Failure 400 
{object} ErrorResponse "error: Bad request" -// @Failure 404 {object} ErrorResponse "error: Not found" -// @Failure 500 {object} ErrorResponse "error: Server error" -// @Router /blobs/feed/forward [get] -func (s *ServerV2) FetchBlobFeedForward(c *gin.Context) { +// @Param direction query string true "Direction to fetch: 'forward' or 'backward' [default: forward]" +// @Param before query string false "Fetch blobs before this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z) [default: now]" +// @Param after query string false "Fetch blobs after this time, exclusive (ISO 8601 format, example: 2006-01-02T15:04:05Z); must be smaller than 'before' [default: before-1h]" +// @Param cursor query string false "Pagination cursor; for 'forward', fetches blobs from 'cursor' to 'before', for 'backward', fetches from 'after' to 'cursor' (all are exclusive) [default: empty]" +// @Param limit query int false "Maximum number of blobs to fetch [default: 20; max: 1000]" +// @Success 200 {object} BlobFeedResponse +// @Failure 400 {object} ErrorResponse "error: Bad request" +// @Failure 404 {object} ErrorResponse "error: Not found" +// @Failure 500 {object} ErrorResponse "error: Server error" +// @Router /blobs/feed [get] +func (s *ServerV2) FetchBlobFeed(c *gin.Context) { handlerStart := time.Now() var err error - now := handlerStart - oldestTime := now.Add(-maxBlobAge) - - untilTime := now - if c.Query("until") != "" { - untilTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("until")) - if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward") - invalidParamsErrorResponse(c, fmt.Errorf("failed to parse until param: %w", err)) - return - } - if untilTime.Before(oldestTime) { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward") - invalidParamsErrorResponse(c, fmt.Errorf("until time cannot be more than 14 days in the past, found: %s", c.Query("until"))) + // Validate direction + direction := "forward" + if dirStr := 
c.Query("direction"); dirStr != "" { + if dirStr != "forward" && dirStr != "backward" { + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") + invalidParamsErrorResponse(c, fmt.Errorf("direction must be either 'forward' or 'backward', found: %s", dirStr)) return } + direction = dirStr } - limit, err := strconv.Atoi(c.DefaultQuery("limit", "20")) - if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward") - invalidParamsErrorResponse(c, fmt.Errorf("failed to parse limit param: %w", err)) - return - } - if limit <= 0 || limit > maxNumBlobsPerBlobFeedResponse { - limit = maxNumBlobsPerBlobFeedResponse - } - - var afterCursor blobstore.BlobFeedCursor - - // Handle cursor (overrides after) - if cursorStr := c.Query("cursor"); cursorStr != "" { - cursor, err := new(blobstore.BlobFeedCursor).FromCursorKey(cursorStr) - if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward") - invalidParamsErrorResponse(c, fmt.Errorf("failed to parse the cursor: %w", err)) - return - } - afterCursor = *cursor - } else { - // Use after parameter if no cursor - afterTime := untilTime.Add(-time.Hour) // default to 1 hour ago - if c.Query("after") != "" { - afterTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("after")) - if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward") - invalidParamsErrorResponse(c, fmt.Errorf("failed to parse after param: %w", err)) - return - } - if afterTime.Before(oldestTime) { - afterTime = oldestTime - } - } - afterCursor = blobstore.BlobFeedCursor{ - RequestedAt: uint64(afterTime.UnixNano()), - } - } - - untilCursor := blobstore.BlobFeedCursor{ - RequestedAt: uint64(untilTime.UnixNano()), - } - - blobs, nextCursor, err := s.blobMetadataStore.GetBlobMetadataByRequestedAt( - c.Request.Context(), - afterCursor, - untilCursor, - limit, - ) - if err != nil { - s.metrics.IncrementFailedRequestNum("FetchBlobFeedForward") - errorResponse(c, fmt.Errorf("failed to fetch feed from blob 
metadata store: %w", err)) - return - } - s.sendBlobFeedResponse(c, blobs, nextCursor, handlerStart) -} - -// FetchBlobFeedBackward godoc -// -// @Summary Fetch blob feed backward in time (newest to oldest) -// @Tags Blobs -// @Produce json -// @Param before query string false "Fetch blobs before this time (ISO 8601 format: 2006-01-02T15:04:05Z) [default: now]" -// @Param until query string false "Stop fetching at this time (ISO 8601 format: 2006-01-02T15:04:05Z) [default: before - 1h]" -// @Param cursor query string false "Pagination cursor for fetching older items; override before param [default: empty]" -// @Param limit query int false "Maximum number of blobs to fetch [default: 20; max: 1000]" -// @Success 200 {object} BlobFeedResponse -// @Failure 400 {object} ErrorResponse "error: Bad request" -// @Failure 404 {object} ErrorResponse "error: Not found" -// @Failure 500 {object} ErrorResponse "error: Server error" -// @Router /blobs/feed/backward [get] -func (s *ServerV2) FetchBlobFeedBackward(c *gin.Context) { - handlerStart := time.Now() - var err error - now := handlerStart oldestTime := now.Add(-maxBlobAge) // Handle before parameter - beforeTime := now // default to now + beforeTime := now if c.Query("before") != "" { beforeTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("before")) if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward") + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") invalidParamsErrorResponse(c, fmt.Errorf("failed to parse before param: %w", err)) return } if beforeTime.Before(oldestTime) { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward") + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") invalidParamsErrorResponse(c, fmt.Errorf("before time cannot be more than 14 days in the past, found: %s", c.Query("before"))) return } + if now.Before(beforeTime) { + beforeTime = now + } + } + + // Handle after parameter + afterTime := beforeTime.Add(-time.Hour) + if 
c.Query("after") != "" { + afterTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("after")) + if err != nil { + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") + invalidParamsErrorResponse(c, fmt.Errorf("failed to parse after param: %w", err)) + return + } + if now.Before(afterTime) { + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") + invalidParamsErrorResponse(c, fmt.Errorf("'after' must be before current time, found: %s", c.Query("after"))) + return + } + if afterTime.Before(oldestTime) { + afterTime = oldestTime + } + } + + // Validate time range + if !afterTime.Before(beforeTime) { + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") + invalidParamsErrorResponse(c, fmt.Errorf("after time must be before before time")) + return } limit, err := strconv.Atoi(c.DefaultQuery("limit", "20")) if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward") + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") invalidParamsErrorResponse(c, fmt.Errorf("failed to parse limit param: %w", err)) return } @@ -157,53 +105,55 @@ func (s *ServerV2) FetchBlobFeedBackward(c *gin.Context) { limit = maxNumBlobsPerBlobFeedResponse } - var beforeCursor blobstore.BlobFeedCursor + // Convert times to cursors + afterCursor := blobstore.BlobFeedCursor{ + RequestedAt: uint64(afterTime.UnixNano()), + } + beforeCursor := blobstore.BlobFeedCursor{ + RequestedAt: uint64(beforeTime.UnixNano()), + } - // Handle cursor (overrides before) + current := blobstore.BlobFeedCursor{ + RequestedAt: 0, + } + // Handle cursor if provided if cursorStr := c.Query("cursor"); cursorStr != "" { cursor, err := new(blobstore.BlobFeedCursor).FromCursorKey(cursorStr) if err != nil { - s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward") + s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed") invalidParamsErrorResponse(c, fmt.Errorf("failed to parse the cursor: %w", err)) return } - beforeCursor = *cursor - } else { - beforeCursor = 
blobstore.BlobFeedCursor{
-			RequestedAt: uint64(beforeTime.UnixNano()),
-		}
+		current = *cursor
 	}
 
-	// Handle until parameter
-	untilTime := now.Add(-time.Hour) // default to 1 hour ago
-	if c.Query("until") != "" {
-		untilTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("until"))
-		if err != nil {
-			s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
-			invalidParamsErrorResponse(c, fmt.Errorf("failed to parse until param: %w", err))
-			return
-		}
-		if untilTime.Before(oldestTime) {
-			untilTime = oldestTime
-		}
-	}
+	var blobs []*v2.BlobMetadata
+	var nextCursor *blobstore.BlobFeedCursor
 
-	untilCursor := blobstore.BlobFeedCursor{
-		RequestedAt: uint64(untilTime.UnixNano()),
+	if direction == "forward" {
+		startCursor := afterCursor
+		// The presence of `cursor` param will override the `after` param
+		if current.RequestedAt > 0 {
+			startCursor = current
+		}
+		blobs, nextCursor, err = s.blobMetadataStore.GetBlobMetadataByRequestedAt(
+			c.Request.Context(),
+			startCursor,
+			beforeCursor,
+			limit,
+		)
+	} else {
+		// TODO(jianxiao): To be implemented
+		errorResponse(c, errors.New("Not Implemented"))
+		return
 	}
 
-	// TODO(jianxiao): this is just a placeholder as GetBlobMetadataByRequested is doing forward retrieval
-	blobs, nextCursor, err := s.blobMetadataStore.GetBlobMetadataByRequestedAt(
-		c.Request.Context(),
-		untilCursor,
-		beforeCursor,
-		limit,
-	)
 	if err != nil {
-		s.metrics.IncrementFailedRequestNum("FetchBlobFeedBackward")
+		s.metrics.IncrementFailedRequestNum("FetchBlobFeed")
 		errorResponse(c, fmt.Errorf("failed to fetch feed from blob metadata store: %w", err))
 		return
 	}
+
 	s.sendBlobFeedResponse(c, blobs, nextCursor, handlerStart)
 }
 
diff --git a/disperser/dataapi/v2/server_v2.go b/disperser/dataapi/v2/server_v2.go
index d65e6b28b4..e1a5a08937 100644
--- a/disperser/dataapi/v2/server_v2.go
+++ b/disperser/dataapi/v2/server_v2.go
@@ -263,8 +263,7 @@ func (s *ServerV2) Start() error {
 	{
 		blobs := v2.Group("/blobs")
 		{
-			blobs.GET("/feed/forward", 
s.FetchBlobFeedForward)
-			blobs.GET("/feed/backward", s.FetchBlobFeedBackward)
+			blobs.GET("/feed", s.FetchBlobFeed)
 			blobs.GET("/:blob_key", s.FetchBlob)
 			blobs.GET("/:blob_key/certificate", s.FetchBlobCertificate)
 			blobs.GET("/:blob_key/attestation-info", s.FetchBlobAttestationInfo)
diff --git a/disperser/dataapi/v2/server_v2_test.go b/disperser/dataapi/v2/server_v2_test.go
index 6f8eb08e78..91e4f98d33 100644
--- a/disperser/dataapi/v2/server_v2_test.go
+++ b/disperser/dataapi/v2/server_v2_test.go
@@ -444,15 +444,16 @@ func TestFetchBlobFeed(t *testing.T) {
 
 	defer deleteItems(t, dynamoKeys)
 
-	r.GET("/v2/blobs/feed/forward", testDataApiServerV2.FetchBlobFeedForward)
+	r.GET("/v2/blobs/feed", testDataApiServerV2.FetchBlobFeed)
 
 	t.Run("invalid params", func(t *testing.T) {
 		reqUrls := []string{
-			"/v2/blobs/feed/forward?cursor=abc",
-			"/v2/blobs/feed/forward?limit=abc",
-			"/v2/blobs/feed/forward?after=abc",
-			"/v2/blobs/feed/forward?until=2006-01-02T15:04:05",
-			"/v2/blobs/feed/forward?until=2006-01-02T15:04:05Z",
+			"/v2/blobs/feed?direction=abc",
+			"/v2/blobs/feed?cursor=abc",
+			"/v2/blobs/feed?limit=abc",
+			"/v2/blobs/feed?after=abc",
+			"/v2/blobs/feed?before=2006-01-02T15:04:05",
+			"/v2/blobs/feed?before=2006-01-02T15:04:05Z",
 		}
 		for _, url := range reqUrls {
 			w := httptest.NewRecorder()
@@ -467,7 +468,7 @@ func TestFetchBlobFeed(t *testing.T) {
 	// - Most recent 1 hour of blobs (60 blobs total available, keys[43], ..., keys[102])
 	// - Limited to 20 results (the default "limit")
 	// - Starting from blob[43] through blob[62]
-	w := executeRequest(t, r, http.MethodGet, "/v2/blobs/feed/forward")
+	w := executeRequest(t, r, http.MethodGet, "/v2/blobs/feed")
 	response := decodeResponseBody[serverv2.BlobFeedResponse](t, w)
 	require.Equal(t, 20, len(response.Blobs))
 	for i := 0; i < 20; i++ {
@@ -481,7 +482,7 @@ func TestFetchBlobFeed(t *testing.T) {
 	t.Run("various query ranges and limits", func(t *testing.T) {
 		// Test 1: Unlimited results in 1-hour window
 		// Returns 
keys[43] through keys[102] (60 blobs) - w := executeRequest(t, r, http.MethodGet, "/v2/blobs/feed/forward?limit=0") + w := executeRequest(t, r, http.MethodGet, "/v2/blobs/feed?limit=0") response := decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, 60, len(response.Blobs)) for i := 0; i < 60; i++ { @@ -494,7 +495,7 @@ func TestFetchBlobFeed(t *testing.T) { // Test 2: 2-hour window captures all test blobs // Verifies correct ordering of timestamp-colliding blobs afterTime := time.Now().Add(-2 * time.Hour).Format("2006-01-02T15:04:05.999999999Z") // nano precision format - reqUrl := fmt.Sprintf("/v2/blobs/feed/forward?after=%s&limit=-1", afterTime) + reqUrl := fmt.Sprintf("/v2/blobs/feed?after=%s&limit=-1", afterTime) w = executeRequest(t, r, http.MethodGet, reqUrl) response = decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, numBlobs, len(response.Blobs)) @@ -513,7 +514,7 @@ func TestFetchBlobFeed(t *testing.T) { // Retrieves keys[41] through keys[100] tm := time.Unix(0, int64(requestedAt[100])+1).UTC() endTime := tm.Format("2006-01-02T15:04:05.999999999Z") - reqUrl = fmt.Sprintf("/v2/blobs/feed/forward?until=%s&limit=-1", endTime) + reqUrl = fmt.Sprintf("/v2/blobs/feed?before=%s&limit=-1", endTime) w = executeRequest(t, r, http.MethodGet, reqUrl) response = decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, 60, len(response.Blobs)) @@ -534,7 +535,7 @@ func TestFetchBlobFeed(t *testing.T) { // - Proper token handling tm := time.Unix(0, time.Now().UnixNano()).UTC() endTime := tm.Format("2006-01-02T15:04:05.999999999Z") // nano precision format - reqUrl := fmt.Sprintf("/v2/blobs/feed/forward?until=%s&limit=20", endTime) + reqUrl := fmt.Sprintf("/v2/blobs/feed?before=%s&limit=20", endTime) w := executeRequest(t, r, http.MethodGet, reqUrl) response := decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, 20, len(response.Blobs)) @@ -546,7 +547,7 @@ func TestFetchBlobFeed(t *testing.T) { checkCursor(t, 
response.Cursor, requestedAt[62], keys[62]) // Request next page using pagination cursor - reqUrl = fmt.Sprintf("/v2/blobs/feed/forward?until=%s&limit=20&cursor=%s", endTime, response.Cursor) + reqUrl = fmt.Sprintf("/v2/blobs/feed?before=%s&limit=20&cursor=%s", endTime, response.Cursor) w = executeRequest(t, r, http.MethodGet, reqUrl) response = decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, 20, len(response.Blobs)) @@ -567,7 +568,7 @@ func TestFetchBlobFeed(t *testing.T) { endTime := tm.Format("2006-01-02T15:04:05.999999999Z") // nano precision format // First page: fetch 2 blobs, which have same requestedAt timestamp - reqUrl := fmt.Sprintf("/v2/blobs/feed/forward?until=%s&limit=2", endTime) + reqUrl := fmt.Sprintf("/v2/blobs/feed?before=%s&limit=2", endTime) w := executeRequest(t, r, http.MethodGet, reqUrl) response := decodeResponseBody[serverv2.BlobFeedResponse](t, w) require.Equal(t, 2, len(response.Blobs)) @@ -579,7 +580,7 @@ func TestFetchBlobFeed(t *testing.T) { checkCursor(t, response.Cursor, requestedAt[1], firstBlobKeys[1]) // Second page: fetch remaining blobs (limit=0 means no limit, hence reach the last blob) - reqUrl = fmt.Sprintf("/v2/blobs/feed/forward?until=%s&limit=0&cursor=%s", endTime, response.Cursor) + reqUrl = fmt.Sprintf("/v2/blobs/feed?before=%s&limit=0&cursor=%s", endTime, response.Cursor) w = executeRequest(t, r, http.MethodGet, reqUrl) response = decodeResponseBody[serverv2.BlobFeedResponse](t, w) // Verify second page contains: