
Commit c6306e7

Make blob feed API into two-way APIs
1 parent 268a3a9 commit c6306e7

3 files changed: +217 -112 lines changed

disperser/dataapi/v2/blobs.go

Lines changed: 179 additions & 76 deletions
@@ -10,129 +10,201 @@ import (
 
     "github.com/Layr-Labs/eigenda/core"
     corev2 "github.com/Layr-Labs/eigenda/core/v2"
+    v2 "github.com/Layr-Labs/eigenda/disperser/common/v2"
     "github.com/Layr-Labs/eigenda/disperser/common/v2/blobstore"
     "github.com/Layr-Labs/eigenda/disperser/dataapi"
     "github.com/gin-gonic/gin"
 )
 
-// FetchBlobFeed godoc
+// FetchBlobFeedForward godoc
 //
-// @Summary Fetch blob feed
-// @Tags Blobs
-// @Produce json
-// @Param end query string false "Fetch blobs up to the end time (ISO 8601 format: 2006-01-02T15:04:05Z) [default: now]"
-// @Param interval query int false "Fetch blobs starting from an interval (in seconds) before the end time [default: 3600]"
-// @Param pagination_token query string false "Fetch blobs starting from the pagination token (exclusively). Overrides the interval param if specified [default: empty]"
-// @Param limit query int false "The maximum number of blobs to fetch. System max (1000) if limit <= 0 [default: 20; max: 1000]"
-// @Success 200 {object} BlobFeedResponse
-// @Failure 400 {object} ErrorResponse "error: Bad request"
-// @Failure 404 {object} ErrorResponse "error: Not found"
-// @Failure 500 {object} ErrorResponse "error: Server error"
-// @Router /blobs/feed [get]
-func (s *ServerV2) FetchBlobFeed(c *gin.Context) {
+// @Summary Fetch blob feed forward in time (oldest to newest)
+// @Tags Blobs
+// @Produce json
+// @Param after query string false "Fetch blobs after this time (ISO 8601 format) [default: until - 1h]"
+// @Param until query string false "Stop fetching at this time (ISO 8601 format) [default: now]"
+// @Param cursor query string false "Pagination cursor for fetching newer items; overrides after [default: empty]"
+// @Param limit query int false "Maximum number of blobs to fetch [default: 20; max: 1000]"
+// @Success 200 {object} BlobFeedResponse
+// @Failure 400 {object} ErrorResponse "error: Bad request"
+// @Failure 404 {object} ErrorResponse "error: Not found"
+// @Failure 500 {object} ErrorResponse "error: Server error"
+// @Router /blobs/feed/forward [get]
+func (s *ServerV2) FetchBlobFeedForward(c *gin.Context) {
     handlerStart := time.Now()
     var err error
 
     now := handlerStart
     oldestTime := now.Add(-maxBlobAge)
 
-    endTime := now
-    if c.Query("end") != "" {
-        endTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("end"))
-        if err != nil {
-            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
-            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse end param: %w", err))
-            return
-        }
-        if endTime.Before(oldestTime) {
-            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
-            invalidParamsErrorResponse(c, fmt.Errorf("end time cannot be more than 14 days in the past, found: %s", c.Query("end")))
-            return
-        }
-    }
-
-    interval := 3600
-    if c.Query("interval") != "" {
-        interval, err = strconv.Atoi(c.Query("interval"))
+    untilTime := now
+    if c.Query("until") != "" {
+        untilTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("until"))
         if err != nil {
-            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
-            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse interval param: %w", err))
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward")
+            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse until param: %w", err))
             return
         }
-        if interval <= 0 {
-            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
-            invalidParamsErrorResponse(c, fmt.Errorf("interval must be greater than 0, found: %d", interval))
+        if untilTime.Before(oldestTime) {
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward")
+            invalidParamsErrorResponse(c, fmt.Errorf("until time cannot be more than 14 days in the past, found: %s", c.Query("until")))
             return
         }
     }
 
     limit, err := strconv.Atoi(c.DefaultQuery("limit", "20"))
     if err != nil {
-        s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
+        s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward")
         invalidParamsErrorResponse(c, fmt.Errorf("failed to parse limit param: %w", err))
         return
     }
     if limit <= 0 || limit > maxNumBlobsPerBlobFeedResponse {
         limit = maxNumBlobsPerBlobFeedResponse
     }
 
-    paginationCursor := blobstore.BlobFeedCursor{
-        RequestedAt: 0,
-    }
-    if c.Query("pagination_token") != "" {
-        cursor, err := paginationCursor.FromCursorKey(c.Query("pagination_token"))
+    var afterCursor blobstore.BlobFeedCursor
+
+    // Handle cursor (overrides after)
+    if cursorStr := c.Query("cursor"); cursorStr != "" {
+        cursor, err := new(blobstore.BlobFeedCursor).FromCursorKey(cursorStr)
         if err != nil {
-            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeed")
-            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse the pagination token: %w", err))
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward")
+            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse the cursor: %w", err))
             return
         }
-        paginationCursor = *cursor
+        afterCursor = *cursor
+    } else {
+        // Use after parameter if no cursor
+        afterTime := untilTime.Add(-time.Hour) // default: 1 hour before untilTime
+        if c.Query("after") != "" {
+            afterTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("after"))
+            if err != nil {
+                s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedForward")
+                invalidParamsErrorResponse(c, fmt.Errorf("failed to parse after param: %w", err))
+                return
+            }
+            if afterTime.Before(oldestTime) {
+                afterTime = oldestTime
+            }
+        }
+        afterCursor = blobstore.BlobFeedCursor{
+            RequestedAt: uint64(afterTime.UnixNano()),
+        }
     }
 
-    startTime := endTime.Add(-time.Duration(interval) * time.Second)
-    if startTime.Before(oldestTime) {
-        startTime = oldestTime
+    untilCursor := blobstore.BlobFeedCursor{
+        RequestedAt: uint64(untilTime.UnixNano()),
     }
-    startCursor := blobstore.BlobFeedCursor{
-        RequestedAt: uint64(startTime.UnixNano()),
-    }
-    if startCursor.LessThan(&paginationCursor) {
-        startCursor = paginationCursor
+
+    blobs, nextCursor, err := s.blobMetadataStore.GetBlobMetadataByRequestedAt(
+        c.Request.Context(),
+        afterCursor,
+        untilCursor,
+        limit,
+    )
+    if err != nil {
+        s.metrics.IncrementFailedRequestNum("FetchBlobFeedForward")
+        errorResponse(c, fmt.Errorf("failed to fetch feed from blob metadata store: %w", err))
+        return
     }
-    endCursor := blobstore.BlobFeedCursor{
-        RequestedAt: uint64(endTime.UnixNano()),
+    s.sendBlobFeedResponse(c, blobs, nextCursor, handlerStart)
+}
+
+// FetchBlobFeedBackward godoc
+//
+// @Summary Fetch blob feed backward in time (newest to oldest)
+// @Tags Blobs
+// @Produce json
+// @Param before query string false "Fetch blobs before this time (ISO 8601 format) [default: now]"
+// @Param until query string false "Stop fetching at this time (ISO 8601 format) [default: now-1h]"
+// @Param cursor query string false "Pagination cursor for fetching older items; overrides before [default: empty]"
+// @Param limit query int false "Maximum number of blobs to fetch [default: 20; max: 1000]"
+// @Success 200 {object} BlobFeedResponse
+// @Failure 400 {object} ErrorResponse "error: Bad request"
+// @Failure 404 {object} ErrorResponse "error: Not found"
+// @Failure 500 {object} ErrorResponse "error: Server error"
+// @Router /blobs/feed/backward [get]
+func (s *ServerV2) FetchBlobFeedBackward(c *gin.Context) {
+    handlerStart := time.Now()
+    var err error
+
+    now := handlerStart
+    oldestTime := now.Add(-maxBlobAge)
+
+    // Handle before parameter
+    beforeTime := now // default to now
+    if c.Query("before") != "" {
+        beforeTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("before"))
+        if err != nil {
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
+            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse before param: %w", err))
+            return
+        }
+        if beforeTime.Before(oldestTime) {
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
+            invalidParamsErrorResponse(c, fmt.Errorf("before time cannot be more than 14 days in the past, found: %s", c.Query("before")))
+            return
+        }
     }
 
-    blobs, paginationToken, err := s.blobMetadataStore.GetBlobMetadataByRequestedAt(c.Request.Context(), startCursor, endCursor, limit)
+    limit, err := strconv.Atoi(c.DefaultQuery("limit", "20"))
     if err != nil {
-        s.metrics.IncrementFailedRequestNum("FetchBlobFeed")
-        errorResponse(c, fmt.Errorf("failed to fetch feed from blob metadata store: %w", err))
+        s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
+        invalidParamsErrorResponse(c, fmt.Errorf("failed to parse limit param: %w", err))
         return
     }
+    if limit <= 0 || limit > maxNumBlobsPerBlobFeedResponse {
+        limit = maxNumBlobsPerBlobFeedResponse
+    }
 
-    token := ""
-    if paginationToken != nil {
-        token = paginationToken.ToCursorKey()
+    var beforeCursor blobstore.BlobFeedCursor
+
+    // Handle cursor (overrides before)
+    if cursorStr := c.Query("cursor"); cursorStr != "" {
+        cursor, err := new(blobstore.BlobFeedCursor).FromCursorKey(cursorStr)
+        if err != nil {
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
+            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse the cursor: %w", err))
+            return
+        }
+        beforeCursor = *cursor
+    } else {
+        beforeCursor = blobstore.BlobFeedCursor{
+            RequestedAt: uint64(beforeTime.UnixNano()),
+        }
     }
-    blobInfo := make([]BlobInfo, len(blobs))
-    for i := 0; i < len(blobs); i++ {
-        bk, err := blobs[i].BlobHeader.BlobKey()
+
+    // Handle until parameter
+    untilTime := now.Add(-time.Hour) // default to 1 hour ago
+    if c.Query("until") != "" {
+        untilTime, err = time.Parse("2006-01-02T15:04:05Z", c.Query("until"))
         if err != nil {
-            s.metrics.IncrementFailedRequestNum("FetchBlobFeed")
-            errorResponse(c, fmt.Errorf("failed to serialize blob key: %w", err))
+            s.metrics.IncrementInvalidArgRequestNum("FetchBlobFeedBackward")
+            invalidParamsErrorResponse(c, fmt.Errorf("failed to parse until param: %w", err))
             return
         }
-        blobInfo[i].BlobKey = bk.Hex()
-        blobInfo[i].BlobMetadata = blobs[i]
+        if untilTime.Before(oldestTime) {
+            untilTime = oldestTime
+        }
     }
-    response := &BlobFeedResponse{
-        Blobs: blobInfo,
-        PaginationToken: token,
+
+    untilCursor := blobstore.BlobFeedCursor{
+        RequestedAt: uint64(untilTime.UnixNano()),
     }
-    c.Writer.Header().Set(cacheControlParam, fmt.Sprintf("max-age=%d", maxFeedBlobAge))
-    s.metrics.IncrementSuccessfulRequestNum("FetchBlobFeed")
-    s.metrics.ObserveLatency("FetchBlobFeed", time.Since(handlerStart))
-    c.JSON(http.StatusOK, response)
+
+    // TODO(jianxiao): this is just a placeholder as GetBlobMetadataByRequestedAt is doing forward retrieval
+    blobs, nextCursor, err := s.blobMetadataStore.GetBlobMetadataByRequestedAt(
+        c.Request.Context(),
+        untilCursor,
+        beforeCursor,
+        limit,
+    )
+    if err != nil {
+        s.metrics.IncrementFailedRequestNum("FetchBlobFeedBackward")
+        errorResponse(c, fmt.Errorf("failed to fetch feed from blob metadata store: %w", err))
+        return
+    }
+    s.sendBlobFeedResponse(c, blobs, nextCursor, handlerStart)
 }
 
 // FetchBlob godoc
@@ -356,3 +428,34 @@ func (s *ServerV2) getAllOperatorsForAttestation(ctx context.Context, attestatio
 
     return operatorList, operatorsByQuorum, nil
 }
+
+func (s *ServerV2) sendBlobFeedResponse(
+    c *gin.Context,
+    blobs []*v2.BlobMetadata,
+    nextCursor *blobstore.BlobFeedCursor,
+    handlerStart time.Time,
+) {
+    cursorStr := ""
+    if nextCursor != nil {
+        cursorStr = nextCursor.ToCursorKey()
+    }
+    blobInfo := make([]BlobInfo, len(blobs))
+    for i := 0; i < len(blobs); i++ {
+        bk, err := blobs[i].BlobHeader.BlobKey()
+        if err != nil {
+            s.metrics.IncrementFailedRequestNum("FetchBlobFeedForward")
+            errorResponse(c, fmt.Errorf("failed to serialize blob key: %w", err))
+            return
+        }
+        blobInfo[i].BlobKey = bk.Hex()
+        blobInfo[i].BlobMetadata = blobs[i]
+    }
+    response := &BlobFeedResponse{
+        Blobs:  blobInfo,
+        Cursor: cursorStr,
+    }
+    c.Writer.Header().Set(cacheControlParam, fmt.Sprintf("max-age=%d", maxFeedBlobAge))
+    s.metrics.IncrementSuccessfulRequestNum("FetchBlobFeedForward")
+    s.metrics.ObserveLatency("FetchBlobFeedForward", time.Since(handlerStart))
+    c.JSON(http.StatusOK, response)
+}
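
The change above splits the single GET /blobs/feed endpoint into GET /blobs/feed/forward and GET /blobs/feed/backward, both returning a BlobFeedResponse whose cursor field can be passed back as the cursor query parameter of the next request, where it overrides after (forward) or before (backward). The sketch below is a minimal, hypothetical Go client for the forward endpoint; the base URL, route prefix, and the local feedPage struct are assumptions for illustration, not part of this commit.

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
    "time"
)

// feedPage mirrors just the fields of BlobFeedResponse this client needs.
type feedPage struct {
    Blobs  []json.RawMessage `json:"blobs"`
    Cursor string            `json:"cursor"`
}

// fetchForwardPage performs one GET /blobs/feed/forward call. On the first call
// pass an empty cursor plus an "after" time; on later calls pass the cursor
// returned by the previous page, which takes precedence over "after".
func fetchForwardPage(baseURL string, after time.Time, cursor string, limit int) (*feedPage, error) {
    q := url.Values{}
    q.Set("limit", fmt.Sprintf("%d", limit))
    if cursor != "" {
        q.Set("cursor", cursor)
    } else {
        q.Set("after", after.UTC().Format("2006-01-02T15:04:05Z"))
    }
    resp, err := http.Get(baseURL + "/blobs/feed/forward?" + q.Encode())
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status: %s", resp.Status)
    }
    var page feedPage
    if err := json.NewDecoder(resp.Body).Decode(&page); err != nil {
        return nil, err
    }
    return &page, nil
}

func main() {
    // Hypothetical base URL; substitute the real data API address and route prefix.
    const baseURL = "http://localhost:8080/api/v2"
    page, err := fetchForwardPage(baseURL, time.Now().Add(-time.Hour), "", 20)
    if err != nil {
        panic(err)
    }
    fmt.Printf("fetched %d blobs, next cursor: %q\n", len(page.Blobs), page.Cursor)
}

The backward endpoint is queried the same way, with before and until in place of after and until, and returns blobs newest to oldest.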

disperser/dataapi/v2/server_v2.go

Lines changed: 4 additions & 3 deletions
@@ -92,8 +92,8 @@ type (
         BlobMetadata *disperserv2.BlobMetadata `json:"blob_metadata"`
     }
     BlobFeedResponse struct {
-        Blobs           []BlobInfo `json:"blobs"`
-        PaginationToken string     `json:"pagination_token"`
+        Blobs  []BlobInfo `json:"blobs"`
+        Cursor string     `json:"cursor"`
     }
 
     BatchResponse struct {
@@ -263,7 +263,8 @@ func (s *ServerV2) Start() error {
     {
         blobs := v2.Group("/blobs")
         {
-            blobs.GET("/feed", s.FetchBlobFeed)
+            blobs.GET("/feed/forward", s.FetchBlobFeedForward)
+            blobs.GET("/feed/backward", s.FetchBlobFeedBackward)
             blobs.GET("/:blob_key", s.FetchBlob)
             blobs.GET("/:blob_key/certificate", s.FetchBlobCertificate)
             blobs.GET("/:blob_key/attestation-info", s.FetchBlobAttestationInfo)

0 commit comments