Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 0aea013

Browse files
Update java ml module gradle (#37471)
* gradle and formatting * openai module fix * add testing doc * revert deps
1 parent d32bc97 commit 0aea013

19 files changed

Lines changed: 885 additions & 769 deletions

File tree

sdks/java/ml/inference/openai/build.gradle

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,17 @@
1717
*/
1818
plugins {
1919
id 'org.apache.beam.module'
20-
id 'java'
2120
}
2221

22+
applyJavaNature(
23+
automaticModuleName: 'org.apache.beam.sdk.ml.inference.openai',
24+
requireJavaVersion: JavaVersion.VERSION_11
25+
)
26+
provideIntegrationTestingDependencies()
27+
enableJavaPerformanceTesting()
28+
2329
description = "Apache Beam :: SDKs :: Java :: ML :: Inference :: OpenAI"
30+
ext.summary = "OpenAI model handler for remote inference"
2431

2532
dependencies {
2633
implementation project(":sdks:java:ml:inference:remote")

sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelHandler.java

Lines changed: 41 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@
44
* distributed with this work for additional information
55
* regarding copyright ownership. The ASF licenses this file
66
* to you under the Apache License, Version 2.0 (the
7-
* License); you may not use this file except in compliance
7+
* "License"); you may not use this file except in compliance
88
* with the License. You may obtain a copy of the License at
99
*
1010
* http://www.apache.org/licenses/LICENSE-2.0
1111
*
1212
* Unless required by applicable law or agreed to in writing, software
13-
* distributed under the License is distributed on an AS IS BASIS,
13+
* distributed under the License is distributed on an "AS IS" BASIS,
1414
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
@@ -26,20 +26,20 @@
2626
import com.openai.core.JsonSchemaLocalValidation;
2727
import com.openai.models.responses.ResponseCreateParams;
2828
import com.openai.models.responses.StructuredResponseCreateParams;
29-
import org.apache.beam.sdk.ml.inference.remote.BaseModelHandler;
30-
import org.apache.beam.sdk.ml.inference.remote.PredictionResult;
31-
3229
import java.util.List;
3330
import java.util.stream.Collectors;
31+
import org.apache.beam.sdk.ml.inference.remote.BaseModelHandler;
32+
import org.apache.beam.sdk.ml.inference.remote.PredictionResult;
3433

3534
/**
3635
* Model handler for OpenAI API inference requests.
3736
*
38-
* <p>This handler manages communication with OpenAI's API, including client initialization,
39-
* request formatting, and response parsing. It uses OpenAI's structured output feature to
40-
* ensure reliable input-output pairing.
37+
* <p>This handler manages communication with OpenAI's API, including client initialization, request
38+
* formatting, and response parsing. It uses OpenAI's structured output feature to ensure reliable
39+
* input-output pairing.
4140
*
4241
* <h3>Usage</h3>
42+
*
4343
* <pre>{@code
4444
* OpenAIModelParameters params = OpenAIModelParameters.builder()
4545
* .apiKey("sk-...")
@@ -55,10 +55,10 @@
5555
* .withParameters(params)
5656
* );
5757
* }</pre>
58-
*
5958
*/
59+
@SuppressWarnings("nullness")
6060
public class OpenAIModelHandler
61-
implements BaseModelHandler<OpenAIModelParameters, OpenAIModelInput, OpenAIModelResponse> {
61+
implements BaseModelHandler<OpenAIModelParameters, OpenAIModelInput, OpenAIModelResponse> {
6262

6363
private transient OpenAIClient client;
6464
private OpenAIModelParameters modelParameters;
@@ -67,69 +67,67 @@ public class OpenAIModelHandler
6767
/**
6868
* Initializes the OpenAI client with the provided parameters.
6969
*
70-
* <p>This method is called once during setup. It creates an authenticated
71-
* OpenAI client using the API key from the parameters.
70+
* <p>This method is called once during setup. It creates an authenticated OpenAI client using the
71+
* API key from the parameters.
7272
*
7373
* @param parameters the configuration parameters including API key and model name
7474
*/
7575
@Override
7676
public void createClient(OpenAIModelParameters parameters) {
7777
this.modelParameters = parameters;
78-
this.client = OpenAIOkHttpClient.builder()
79-
.apiKey(this.modelParameters.getApiKey())
80-
.build();
78+
this.client = OpenAIOkHttpClient.builder().apiKey(this.modelParameters.getApiKey()).build();
8179
this.objectMapper = new ObjectMapper();
8280
}
8381

8482
/**
8583
* Performs inference on a batch of inputs using the OpenAI Client.
8684
*
8785
* <p>This method serializes the input batch to JSON string, sends it to OpenAI with structured
88-
* output requirements, and parses the response into {@link PredictionResult} objects
89-
* that pair each input with its corresponding output.
86+
* output requirements, and parses the response into {@link PredictionResult} objects that pair
87+
* each input with its corresponding output.
9088
*
9189
* @param input the list of inputs to process
9290
* @return an iterable of model results and input pairs
9391
*/
9492
@Override
95-
public Iterable<PredictionResult<OpenAIModelInput, OpenAIModelResponse>> request(List<OpenAIModelInput> input) {
93+
public Iterable<PredictionResult<OpenAIModelInput, OpenAIModelResponse>> request(
94+
List<OpenAIModelInput> input) {
9695

9796
try {
9897
// Convert input list to JSON string
9998
String inputBatch =
100-
objectMapper.writeValueAsString(
101-
input.stream()
102-
.map(OpenAIModelInput::getModelInput)
103-
.collect(Collectors.toList()));
99+
objectMapper.writeValueAsString(
100+
input.stream().map(OpenAIModelInput::getModelInput).collect(Collectors.toList()));
104101
// Build structured response parameters
105-
StructuredResponseCreateParams<StructuredInputOutput> clientParams = ResponseCreateParams.builder()
106-
.model(modelParameters.getModelName())
107-
.input(inputBatch)
108-
.text(StructuredInputOutput.class, JsonSchemaLocalValidation.NO)
109-
.instructions(modelParameters.getInstructionPrompt())
110-
.build();
102+
StructuredResponseCreateParams<StructuredInputOutput> clientParams =
103+
ResponseCreateParams.builder()
104+
.model(modelParameters.getModelName())
105+
.input(inputBatch)
106+
.text(StructuredInputOutput.class, JsonSchemaLocalValidation.NO)
107+
.instructions(modelParameters.getInstructionPrompt())
108+
.build();
111109

112110
// Get structured output from the model
113-
StructuredInputOutput structuredOutput = client.responses()
114-
.create(clientParams)
115-
.output()
116-
.stream()
117-
.flatMap(item -> item.message().stream())
118-
.flatMap(message -> message.content().stream())
119-
.flatMap(content -> content.outputText().stream())
120-
.findFirst()
121-
.orElse(null);
111+
StructuredInputOutput structuredOutput =
112+
client.responses().create(clientParams).output().stream()
113+
.flatMap(item -> item.message().stream())
114+
.flatMap(message -> message.content().stream())
115+
.flatMap(content -> content.outputText().stream())
116+
.findFirst()
117+
.orElse(null);
122118

123119
if (structuredOutput == null || structuredOutput.responses == null) {
124120
throw new RuntimeException("Model returned no structured responses");
125121
}
126122

127123
// return PredictionResults
128124
return structuredOutput.responses.stream()
129-
.map(response -> PredictionResult.create(
130-
OpenAIModelInput.create(response.input),
131-
OpenAIModelResponse.create(response.output)))
132-
.collect(Collectors.toList());
125+
.map(
126+
response ->
127+
PredictionResult.create(
128+
OpenAIModelInput.create(response.input),
129+
OpenAIModelResponse.create(response.output)))
130+
.collect(Collectors.toList());
133131

134132
} catch (JsonProcessingException e) {
135133
throw new RuntimeException("Failed to serialize input batch", e);
@@ -154,13 +152,12 @@ public static class Response {
154152
/**
155153
* Schema class for structured output containing multiple responses.
156154
*
157-
* <p>This class defines the expected JSON structure for OpenAI's structured output,
158-
* ensuring reliable parsing of batched inference results.
155+
* <p>This class defines the expected JSON structure for OpenAI's structured output, ensuring
156+
* reliable parsing of batched inference results.
159157
*/
160158
public static class StructuredInputOutput {
161159
@JsonProperty(required = true)
162160
@JsonPropertyDescription("Array of input-output pairs")
163161
public List<Response> responses;
164162
}
165-
166163
}

sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelInput.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,26 +4,28 @@
44
* distributed with this work for additional information
55
* regarding copyright ownership. The ASF licenses this file
66
* to you under the Apache License, Version 2.0 (the
7-
* License); you may not use this file except in compliance
7+
* "License"); you may not use this file except in compliance
88
* with the License. You may obtain a copy of the License at
99
*
1010
* http://www.apache.org/licenses/LICENSE-2.0
1111
*
1212
* Unless required by applicable law or agreed to in writing, software
13-
* distributed under the License is distributed on an AS IS BASIS,
13+
* distributed under the License is distributed on an "AS IS" BASIS,
1414
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
1717
*/
1818
package org.apache.beam.sdk.ml.inference.openai;
1919

2020
import org.apache.beam.sdk.ml.inference.remote.BaseInput;
21+
2122
/**
2223
* Input for OpenAI model inference requests.
2324
*
2425
* <p>This class encapsulates text input to be sent to OpenAI models.
2526
*
2627
* <h3>Example Usage</h3>
28+
*
2729
* <pre>{@code
2830
* OpenAIModelInput input = OpenAIModelInput.create("Translate to French: Hello");
2931
* String text = input.getModelInput(); // "Translate to French: Hello"
@@ -59,5 +61,4 @@ public String getModelInput() {
5961
public static OpenAIModelInput create(String input) {
6062
return new OpenAIModelInput(input);
6163
}
62-
6364
}

sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelParameters.java

Lines changed: 10 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@
44
* distributed with this work for additional information
55
* regarding copyright ownership. The ASF licenses this file
66
* to you under the Apache License, Version 2.0 (the
7-
* License); you may not use this file except in compliance
7+
* "License"); you may not use this file except in compliance
88
* with the License. You may obtain a copy of the License at
99
*
1010
* http://www.apache.org/licenses/LICENSE-2.0
1111
*
1212
* Unless required by applicable law or agreed to in writing, software
13-
* distributed under the License is distributed on an AS IS BASIS,
13+
* distributed under the License is distributed on an "AS IS" BASIS,
1414
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
@@ -22,10 +22,11 @@
2222
/**
2323
* Configuration parameters required for OpenAI model inference.
2424
*
25-
* <p>This class encapsulates all configuration needed to initialize and communicate with
26-
* OpenAI's API, including authentication credentials, model selection, and inference instructions.
25+
* <p>This class encapsulates all configuration needed to initialize and communicate with OpenAI's
26+
* API, including authentication credentials, model selection, and inference instructions.
2727
*
2828
* <h3>Example Usage</h3>
29+
*
2930
* <pre>{@code
3031
* OpenAIModelParameters params = OpenAIModelParameters.builder()
3132
* .apiKey("sk-...")
@@ -36,6 +37,7 @@
3637
*
3738
* @see OpenAIModelHandler
3839
*/
40+
@SuppressWarnings("nullness")
3941
public class OpenAIModelParameters implements BaseModelParameters {
4042

4143
private final String apiKey;
@@ -64,14 +66,12 @@ public static Builder builder() {
6466
return new Builder();
6567
}
6668

67-
6869
public static class Builder {
6970
private String apiKey;
7071
private String modelName;
7172
private String instructionPrompt;
7273

73-
private Builder() {
74-
}
74+
private Builder() {}
7575

7676
/**
7777
* Sets the OpenAI API key for authentication.
@@ -93,9 +93,8 @@ public Builder modelName(String modelName) {
9393
return this;
9494
}
9595
/**
96-
* Sets the instruction prompt for the model.
97-
* This prompt provides context or instructions to the model about how to process
98-
* the input text.
96+
* Sets the instruction prompt for the model. This prompt provides context or instructions to
97+
* the model about how to process the input text.
9998
*
10099
* @param prompt the instruction text (required)
101100
*/
@@ -104,9 +103,7 @@ public Builder instructionPrompt(String prompt) {
104103
return this;
105104
}
106105

107-
/**
108-
* Builds the {@link OpenAIModelParameters} instance.
109-
*/
106+
/** Builds the {@link OpenAIModelParameters} instance. */
110107
public OpenAIModelParameters build() {
111108
return new OpenAIModelParameters(this);
112109
}

sdks/java/ml/inference/openai/src/main/java/org/apache/beam/sdk/ml/inference/openai/OpenAIModelResponse.java

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@
44
* distributed with this work for additional information
55
* regarding copyright ownership. The ASF licenses this file
66
* to you under the Apache License, Version 2.0 (the
7-
* License); you may not use this file except in compliance
7+
* "License"); you may not use this file except in compliance
88
* with the License. You may obtain a copy of the License at
99
*
1010
* http://www.apache.org/licenses/LICENSE-2.0
1111
*
1212
* Unless required by applicable law or agreed to in writing, software
13-
* distributed under the License is distributed on an AS IS BASIS,
13+
* distributed under the License is distributed on an "AS IS" BASIS,
1414
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1515
* See the License for the specific language governing permissions and
1616
* limitations under the License.
@@ -21,9 +21,11 @@
2121

2222
/**
2323
* Response from OpenAI model inference results.
24+
*
2425
* <p>This class encapsulates the text output returned from OpenAI models.
2526
*
2627
* <h3>Example Usage</h3>
28+
*
2729
* <pre>{@code
2830
* OpenAIModelResponse response = OpenAIModelResponse.create("Bonjour");
2931
* String output = response.getModelResponse(); // "Bonjour"
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
/** OpenAI model handler for remote inference. */
20+
package org.apache.beam.sdk.ml.inference.openai;

0 commit comments

Comments (0)