-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy path: StreamData.java
62 lines (54 loc) · 2.73 KB
/
StreamData.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
/*
* This source file was generated by the Gradle 'init' task
*/
package com.openlayer.api.example;
import com.openlayer.api.client.OpenlayerClient;
import com.openlayer.api.client.okhttp.OpenlayerOkHttpClient;
import com.openlayer.api.core.JsonNumber;
import com.openlayer.api.core.JsonString;
import com.openlayer.api.models.InferencePipelineDataStreamParams;
import com.openlayer.api.models.InferencePipelineDataStreamResponse;
import java.util.Arrays;
public class StreamData {

    /**
     * Example entry point: streams a single LLM prediction row to an Openlayer
     * inference pipeline and prints the server's response.
     *
     * <p>Reads client configuration (API key, etc.) from environment variables.
     */
    public static void main(String[] args) {
        // Build the client from environment variables.
        OpenlayerClient client = OpenlayerOkHttpClient.fromEnv();

        // Alternatively, configure the client explicitly:
        /*
         * OpenlayerClient client = OpenlayerOkHttpClient.builder()
         *     .fromEnv()
         *     // Additional properties can be set here
         *     .build();
         */

        // Replace with your inference pipeline id.
        String inferencePipelineId = "your-inference-pipeline-id";

        // One prediction row: the model's input, output, and bookkeeping fields.
        InferencePipelineDataStreamParams.Row predictionRow = buildExampleRow();

        // Assemble the request: target pipeline, the rows to stream, and an
        // LLM-data config mapping row keys to their roles (output, cost, ...).
        InferencePipelineDataStreamParams streamParams = InferencePipelineDataStreamParams.builder()
                .inferencePipelineId(inferencePipelineId)
                .rows(Arrays.asList(predictionRow))
                .config(InferencePipelineDataStreamParams.Config.ofLlmData(
                        InferencePipelineDataStreamParams.Config.LlmData.builder()
                                .outputColumnName("output")
                                .costColumnName("cost")
                                .inputVariableNames(Arrays.asList("user_query"))
                                .numOfTokenColumnName("tokens")
                                .timestampColumnName("timestamp")
                                .build()))
                .build();

        // Send the row to Openlayer and echo the response.
        InferencePipelineDataStreamResponse streamResponse =
                client.inferencePipelines().data().stream(streamParams);
        System.out.println(streamResponse);
    }

    /** Builds the example prediction row: user query, model output, token count, cost, timestamp. */
    private static InferencePipelineDataStreamParams.Row buildExampleRow() {
        return InferencePipelineDataStreamParams.Row.builder()
                .putAdditionalProperty("user_query", JsonString.of("what's the meaning of life?"))
                .putAdditionalProperty("output", JsonString.of("42"))
                .putAdditionalProperty("tokens", JsonNumber.of(7))
                .putAdditionalProperty("cost", JsonNumber.of(0.02))
                .putAdditionalProperty("timestamp", JsonNumber.of(1620000000))
                .build();
    }
}