/*
 * This source file was generated by the Gradle 'init' task
 */
package org.example;

import com.openlayer.api.client.OpenlayerClient;
import com.openlayer.api.client.okhttp.OpenlayerOkHttpClient;
import com.openlayer.api.core.JsonNumber;
import com.openlayer.api.core.JsonString;
import com.openlayer.api.models.InferencePipelineDataStreamParams;
import com.openlayer.api.models.InferencePipelineDataStreamResponse;

import java.util.List;
| 15 | +public class StreamData { |
| 16 | + public static void main(String[] args) { |
| 17 | + // Configure the client using environment variables |
| 18 | + OpenlayerClient client = OpenlayerOkHttpClient.fromEnv(); |
| 19 | + |
| 20 | + // Or you can configure the client with additional properties |
| 21 | + /* |
| 22 | + * OpenlayerClient client = OpenlayerOkHttpClient.builder() |
| 23 | + * .fromEnv() |
| 24 | + * // Additional properties can be set here |
| 25 | + * .build(); |
| 26 | + */ |
| 27 | + |
| 28 | + // Replace with your inference pipeline id |
| 29 | + String inferencePipelineId = "your-inference-pipeline-id"; |
| 30 | + |
| 31 | + // Let's say we want to stream the following row, which represents a model |
| 32 | + // prediction: |
| 33 | + // Define a row with the relevant fields |
| 34 | + InferencePipelineDataStreamParams.Row row = InferencePipelineDataStreamParams.Row.builder() |
| 35 | + .putAdditionalProperty("user_query", JsonString.of("what's the meaning of life?")) |
| 36 | + .putAdditionalProperty("output", JsonString.of("42")) |
| 37 | + .putAdditionalProperty("tokens", JsonNumber.of(7)) |
| 38 | + .putAdditionalProperty("cost", JsonNumber.of(0.02)) |
| 39 | + .putAdditionalProperty("timestamp", JsonNumber.of(1620000000)) |
| 40 | + .build(); |
| 41 | + |
| 42 | + // Create Inference Pipeline Data Stream Parameters |
| 43 | + InferencePipelineDataStreamParams params = InferencePipelineDataStreamParams.builder() |
| 44 | + .inferencePipelineId(inferencePipelineId) |
| 45 | + .rows(List.of(row)) |
| 46 | + .config(InferencePipelineDataStreamParams.Config |
| 47 | + .ofLlmData(InferencePipelineDataStreamParams.Config.LlmData.builder() |
| 48 | + .outputColumnName("output") |
| 49 | + .costColumnName("cost") |
| 50 | + .inputVariableNames(List.of("user_query")) |
| 51 | + .numOfTokenColumnName("tokens") |
| 52 | + .timestampColumnName("timestamp") |
| 53 | + .build())) |
| 54 | + .build(); |
| 55 | + |
| 56 | + // Execute the request |
| 57 | + InferencePipelineDataStreamResponse inferencePipelineDataStreamResponse = client.inferencePipelines().data() |
| 58 | + .stream(params); |
| 59 | + |
| 60 | + // Print the response |
| 61 | + System.out.println(inferencePipelineDataStreamResponse); |
| 62 | + } |
| 63 | +} |
0 commit comments