Commit ffb8d70 (2 parents: f612ccb + d023ef8)

Merge remote-tracking branch 'origin/candidate-9.10.x'

Signed-off-by: Jake Smith <[email protected]>

# Conflicts:
#	commons-hpcc/pom.xml
#	dfsclient/pom.xml
#	pom.xml
#	spark-hpcc/pom.xml
#	wsclient/pom.xml

File tree

5 files changed: +114 additions, -15 deletions

.github/workflows/BuildTestCandidateAndMaster.yml

Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@ jobs:
 
       # speed things up with caching from https://docs.github.com/en/actions/guides/building-and-testing-java-with-maven
       - name: Cache Maven packages
-        uses: actions/cache@v2
+        uses: actions/cache@v3
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}

.github/workflows/baremetal-regression-suite.yml

Lines changed: 1 addition & 1 deletion

@@ -175,7 +175,7 @@ jobs:
 
       # speed things up with caching from https://docs.github.com/en/actions/guides/building-and-testing-java-with-maven
       - name: Cache Maven packages
-        uses: actions/cache@v2
+        uses: actions/cache@v3
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
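
Both workflow edits are the same one-line bump of actions/cache from v2 to v3. As the surrounding steps show, the action caches ~/.m2 keyed on a hash of every pom.xml, so Maven dependency downloads are skipped whenever the POMs are unchanged; moving to v3 keeps the workflows on a currently maintained major version of the action (the older releases run on a deprecated Node.js runtime).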

dfsclient/src/main/java/org/hpccsystems/dfs/client/ColumnPruner.java

Lines changed: 1 addition & 1 deletion

@@ -173,7 +173,7 @@ private FieldDef pruneFieldDefinition(FieldDef originalRecordDef, String path)
 
         if (fieldInfo.shouldCullChildren == false)
         {
-            return originalRecordDef;
+            return new FieldDef(originalRecordDef);
         }
 
         // Datasets are a special case. They will not have a component
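
The one-line ColumnPruner change above replaces a direct return of the caller-supplied record definition with a defensive copy built through FieldDef's copy constructor. Below is a minimal, self-contained sketch of that idiom, assuming nothing from hpcc4j: Node stands in for FieldDef and DefensiveCopyExample is an invented class name. It illustrates why returning new FieldDef(originalRecordDef) keeps later pruning or projection from mutating the original definition tree.

import java.util.ArrayList;
import java.util.List;

class Node
{
    String name;
    List<Node> children = new ArrayList<Node>();

    Node(String name) { this.name = name; }

    // Deep-copy constructor, analogous to new FieldDef(originalRecordDef).
    Node(Node other)
    {
        this.name = other.name;
        for (Node child : other.children)
        {
            this.children.add(new Node(child));
        }
    }
}

public class DefensiveCopyExample
{
    public static void main(String[] args)
    {
        Node original = new Node("record");
        original.children.add(new Node("field1"));

        // Old behaviour: returning 'original' directly lets the caller's pruning
        // mutate the shared record definition.
        // New behaviour: return a copy so the caller's changes stay local.
        Node pruned = new Node(original);
        pruned.children.remove(0);

        System.out.println(original.children.size()); // still 1
        System.out.println(pruned.children.size());   // 0
    }
}

The test added in DFSHPCCFile.java further down verifies exactly this property: the projected record definition and the original must not share any FieldDef instances.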

dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java

Lines changed: 74 additions & 12 deletions

@@ -84,9 +84,14 @@ private static class TaskContext
     {
         private static class TaskOperation
         {
-            public String currentOperationDesc = "";
+            public String operationDesc = "";
             public long operationStartNS = 0;
+            public long operationEndNS = 0;
 
+            public boolean isActive = true;
+            public boolean success = false;
+
+            private List<TaskOperation> childOperations = new ArrayList<TaskOperation>();
 
             public List<String> errorMessages = new ArrayList<String>();
             public List<String> warnMessages = new ArrayList<String>();

@@ -99,29 +104,61 @@ private static class TaskOperation
 
             public Span operationSpan = null;
 
-            public JSONObject end(boolean success)
+            public void addChildOperation(TaskOperation op)
             {
-                if (success)
+                synchronized(childOperations)
                 {
-                    operationSpan.setStatus(StatusCode.OK);
+                    childOperations.add(op);
                 }
-                else
+            }
+
+            public JSONObject end(boolean _success)
+            {
+                if (isActive)
                 {
-                    operationSpan.setStatus(StatusCode.ERROR);
-                }
+                    success = _success;
+                    if (operationSpan != null)
+                    {
+                        if (success)
+                        {
+                            operationSpan.setStatus(StatusCode.OK);
+                        }
+                        else
+                        {
+                            operationSpan.setStatus(StatusCode.ERROR);
+                        }
 
-                operationSpan.end();
+                        operationSpan.end();
+                    }
 
-                long totalOperationTime = System.nanoTime();
-                totalOperationTime -= operationStartNS;
+                    operationEndNS = System.nanoTime();
 
+                    isActive = false;
+                }
+
+                long totalOperationTime = operationEndNS - operationStartNS;
                 double timeInSeconds = (double) totalOperationTime / 1_000_000_000.0;
 
                 JSONObject results = new JSONObject();
 
-                results.put("operation", currentOperationDesc);
+                results.put("operation", operationDesc);
                 results.put("successful", success);
 
+                JSONArray childResults = new JSONArray();
+                synchronized(childOperations)
+                {
+                    for (TaskOperation childOp : childOperations)
+                    {
+                        if (childOp.isActive)
+                        {
+                            warnMessages.add("Child operation: " + childOp.operationDesc + " did not complete.");
+                        }
+
+                        childResults.put(childOp.end(success));
+                    }
+                }
+
+                results.put("childOperations", childResults);
                 JSONArray errors = new JSONArray();
                 for (String err : errorMessages)
                 {

@@ -289,7 +326,7 @@ private void setCurrentOperation(TaskOperation op)
         public void startOperation(String operationName)
         {
             TaskOperation op = new TaskOperation();
-            op.currentOperationDesc = operationName;
+            op.operationDesc = operationName;
             op.operationStartNS = System.nanoTime();
 
             Span parentSpan = null;

@@ -303,6 +340,23 @@ public void startOperation(String operationName)
             setCurrentOperation(op);
         }
 
+        public TaskOperation startChildOperation(String operationName)
+        {
+            if (!hasCurrentOperation())
+            {
+                return null;
+            }
+
+            TaskOperation parentOp = getCurrentOperation();
+
+            TaskOperation childOp = new TaskOperation();
+            childOp.operationDesc = operationName;
+            childOp.operationStartNS = System.nanoTime();
+
+            parentOp.addChildOperation(childOp);
+            return childOp;
+        }
+
         public void endOperation()
         {
             endOperation(true);

@@ -954,6 +1008,8 @@ public void run()
                {
                    try
                    {
+                        TaskContext.TaskOperation fileReadOperation = context.startChildOperation("File Part: " + filePart.getThisPart());
+
                        HpccRemoteFileReader.FileReadContext readContext = new HpccRemoteFileReader.FileReadContext();
                        readContext.parentSpan = context.getCurrentOperation().operationSpan;
                        readContext.originalRD = recordDef;

@@ -971,10 +1027,16 @@ public void run()
                            HPCCRecord record = fileReader.next();
                            recCount++;
                        }
+
+                        fileReadOperation.recordsRead.addAndGet(recCount);
                        context.getCurrentOperation().recordsRead.addAndGet(recCount);
 
                        fileReader.close();
+
+                        fileReadOperation.bytesRead.addAndGet(fileReader.getStreamPosition());
                        context.getCurrentOperation().bytesRead.addAndGet(fileReader.getStreamPosition());
+
+                        fileReadOperation.end(true);
                    }
                    catch (Exception e)
                    {
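
These FileUtility changes restructure TaskOperation so a parent operation can own child operations: startChildOperation() registers a child under the current operation, and end() now closes the operation only once, closes any still-active children, records a warning for each child that never completed, and nests the children's results under "childOperations". The read worker uses this to report per-file-part progress ("File Part: N"). The sketch below is a simplified, self-contained illustration of that parent/child pattern, with the OpenTelemetry spans and JSON reporting omitted; the Operation and ChildOperationExample names are invented for the example and are not hpcc4j API.

import java.util.ArrayList;
import java.util.List;

class Operation
{
    String desc;
    long startNS = System.nanoTime();
    long endNS = 0;
    boolean active = true;
    boolean success = false;
    final List<Operation> children = new ArrayList<Operation>();
    final List<String> warnings = new ArrayList<String>();

    Operation(String desc) { this.desc = desc; }

    // Mirrors TaskContext.startChildOperation(): children are registered under the parent.
    Operation startChild(String desc)
    {
        Operation child = new Operation(desc);
        synchronized (children) { children.add(child); }
        return child;
    }

    // Mirrors TaskOperation.end(): close this operation once, then close any
    // children, warning about those that never completed on their own.
    void end(boolean ok)
    {
        if (active)
        {
            success = ok;
            endNS = System.nanoTime();
            active = false;
        }
        synchronized (children)
        {
            for (Operation child : children)
            {
                if (child.active)
                {
                    warnings.add("Child operation: " + child.desc + " did not complete.");
                }
                child.end(ok);
            }
        }
    }
}

public class ChildOperationExample
{
    public static void main(String[] args)
    {
        Operation read = new Operation("read");

        Operation part0 = read.startChild("File Part: 0");
        part0.end(true);                  // worker finished normally

        read.startChild("File Part: 1");  // worker never calls end()

        read.end(true);
        System.out.println(read.warnings); // [Child operation: File Part: 1 did not complete.]
    }
}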

dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSHPCCFile.java

Lines changed: 37 additions & 0 deletions

@@ -192,6 +192,43 @@ public final void testGetProjectedRecordDefinition() throws HpccFileException
         Assert.assertEquals(3, projectedRecordDefinition.getNumDefs());
     }
 
+    private boolean fieldDefinitionsAreSeparate(FieldDef fieldDef1, FieldDef fieldDef2)
+    {
+        for (int i = 0; i < fieldDef1.getNumDefs(); i++)
+        {
+            if (fieldDef1.getDef(i) == fieldDef2.getDef(i))
+            {
+                return false;
+            }
+
+            if (!fieldDefinitionsAreSeparate(fieldDef1.getDef(i), fieldDef2.getDef(i)))
+            {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    @Test
+    public final void testProjectedRecordDefCloning() throws Exception
+    {
+        FieldDef recordDef = mockHPCCFile.getRecordDefinition();
+
+        String[] fieldNames = new String[recordDef.getNumDefs()];
+        for (int i = 0; i < recordDef.getNumDefs(); i++)
+        {
+            fieldNames[i] = recordDef.getDef(i).getFieldName();
+        }
+
+        String projectList = String.join(",", fieldNames);
+        mockHPCCFile.setProjectList(projectList);
+        FieldDef projectedRecordDefinition = mockHPCCFile.getProjectedRecordDefinition();
+
+        // Ensure the projected record definition is a clone and not modifying the original record definition
+        assert(fieldDefinitionsAreSeparate(recordDef, projectedRecordDefinition));
+    }
+
     @Test
     public final void testIsIndex()
     {
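
A note on the helper's comparison: fieldDefinitionsAreSeparate checks child definitions with == (reference identity) rather than equals(). The point of testProjectedRecordDefCloning is not that the projected definition is value-equal to the original, but that the two trees share no FieldDef instances, so mutating one can never affect the other.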
