Merge remote-tracking branch 'origin/candidate-9.10.x'
Signed-off-by: Jake Smith <[email protected]>

# Conflicts:
#	commons-hpcc/pom.xml
#	dfsclient/pom.xml
#	pom.xml
#	spark-hpcc/pom.xml
#	wsclient/pom.xml
jakesmith committed Feb 14, 2025
2 parents f612ccb + d023ef8 commit ffb8d70
Showing 5 changed files with 114 additions and 15 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/BuildTestCandidateAndMaster.yml
@@ -19,7 +19,7 @@ jobs:

# speed things up with caching from https://docs.github.com/en/actions/guides/building-and-testing-java-with-maven
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
2 changes: 1 addition & 1 deletion .github/workflows/baremetal-regression-suite.yml
@@ -175,7 +175,7 @@ jobs:
# speed things up with caching from https://docs.github.com/en/actions/guides/building-and-testing-java-with-maven
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
@@ -173,7 +173,7 @@ private FieldDef pruneFieldDefinition(FieldDef originalRecordDef, String path)

if (fieldInfo.shouldCullChildren == false)
{
return originalRecordDef;
return new FieldDef(originalRecordDef);
}

// Datasets are a special case. They will not have a component
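This hunk changes pruneFieldDefinition to return new FieldDef(originalRecordDef) instead of the original object, so a caller that later modifies the pruned definition cannot mutate the shared record definition. Below is a minimal, self-contained sketch of that defensive-copy idea; the Def class and all names in it are hypothetical stand-ins, not the real FieldDef API (only the copy-constructor call pattern comes from the diff).

    // Hypothetical illustration of why pruneFieldDefinition now returns a copy:
    // handing back the original object would let callers mutate the cached
    // record definition, while a copy keeps the original intact.
    import java.util.ArrayList;
    import java.util.List;

    public class DefensiveCopyDemo
    {
        // Stand-in for FieldDef; the real class lives in commons-hpcc.
        static class Def
        {
            String name;
            List<Def> children = new ArrayList<>();

            Def(String name) { this.name = name; }

            // Copy constructor, mirroring new FieldDef(originalRecordDef) in the diff.
            Def(Def other)
            {
                this.name = other.name;
                for (Def child : other.children)
                {
                    this.children.add(new Def(child)); // deep copy of nested defs
                }
            }
        }

        public static void main(String[] args)
        {
            Def original = new Def("record");
            original.children.add(new Def("field1"));

            Def pruned = new Def(original);   // copy, as the fixed code does
            pruned.children.remove(0);        // caller mutates only the pruned view

            // The original definition is unaffected because no references are shared.
            System.out.println(original.children.size()); // prints 1
        }
    }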
86 changes: 74 additions & 12 deletions dfsclient/src/main/java/org/hpccsystems/dfs/client/FileUtility.java
@@ -84,9 +84,14 @@ private static class TaskContext
{
private static class TaskOperation
{
public String currentOperationDesc = "";
public String operationDesc = "";
public long operationStartNS = 0;
public long operationEndNS = 0;

public boolean isActive = true;
public boolean success = false;

private List<TaskOperation> childOperations = new ArrayList<TaskOperation>();

public List<String> errorMessages = new ArrayList<String>();
public List<String> warnMessages = new ArrayList<String>();
@@ -99,29 +104,61 @@ private static class TaskOperation

public Span operationSpan = null;

public JSONObject end(boolean success)
public void addChildOperation(TaskOperation op)
{
if (success)
synchronized(childOperations)
{
operationSpan.setStatus(StatusCode.OK);
childOperations.add(op);
}
else
}

public JSONObject end(boolean _success)
{
if (isActive)
{
operationSpan.setStatus(StatusCode.ERROR);
}
success = _success;
if (operationSpan != null)
{
if (success)
{
operationSpan.setStatus(StatusCode.OK);
}
else
{
operationSpan.setStatus(StatusCode.ERROR);
}

operationSpan.end();
operationSpan.end();
}

long totalOperationTime = System.nanoTime();
totalOperationTime -= operationStartNS;
operationEndNS = System.nanoTime();

isActive = false;
}

long totalOperationTime = operationEndNS - operationStartNS;
double timeInSeconds = (double) totalOperationTime / 1_000_000_000.0;

JSONObject results = new JSONObject();

results.put("operation", currentOperationDesc);
results.put("operation", operationDesc);
results.put("successful", success);

JSONArray childResults = new JSONArray();
synchronized(childOperations)
{
for (TaskOperation childOp : childOperations)
{
if (childOp.isActive)
{
warnMessages.add("Child operation: " + childOp.operationDesc + " did not complete.");
}

childResults.put(childOp.end(success));
}
}
results.put("childOperations", childResults);

JSONArray errors = new JSONArray();
for (String err : errorMessages)
{
@@ -289,7 +326,7 @@ private void setCurrentOperation(TaskOperation op)
public void startOperation(String operationName)
{
TaskOperation op = new TaskOperation();
op.currentOperationDesc = operationName;
op.operationDesc = operationName;
op.operationStartNS = System.nanoTime();

Span parentSpan = null;
@@ -303,6 +340,23 @@ public void startOperation(String operationName)
setCurrentOperation(op);
}

public TaskOperation startChildOperation(String operationName)
{
if (!hasCurrentOperation())
{
return null;
}

TaskOperation parentOp = getCurrentOperation();

TaskOperation childOp = new TaskOperation();
childOp.operationDesc = operationName;
childOp.operationStartNS = System.nanoTime();

parentOp.addChildOperation(childOp);
return childOp;
}

public void endOperation()
{
endOperation(true);
@@ -954,6 +1008,8 @@ public void run()
{
try
{
TaskContext.TaskOperation fileReadOperation = context.startChildOperation("File Part: " + filePart.getThisPart());

HpccRemoteFileReader.FileReadContext readContext = new HpccRemoteFileReader.FileReadContext();
readContext.parentSpan = context.getCurrentOperation().operationSpan;
readContext.originalRD = recordDef;
@@ -971,10 +1027,16 @@
HPCCRecord record = fileReader.next();
recCount++;
}

fileReadOperation.recordsRead.addAndGet(recCount);
context.getCurrentOperation().recordsRead.addAndGet(recCount);

fileReader.close();

fileReadOperation.bytesRead.addAndGet(fileReader.getStreamPosition());
context.getCurrentOperation().bytesRead.addAndGet(fileReader.getStreamPosition());

fileReadOperation.end(true);
}
catch (Exception e)
{
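The run() changes above start a child operation per file part ("File Part: N"), record per-part records and bytes alongside the parent operation's aggregate counters, and close the child with end(true) once the part has been read. Below is a rough, self-contained sketch of that parent/child accounting pattern under simplified assumptions; the Operation class is a stand-in, not the real TaskContext/TaskOperation API.

    // Simplified model of the parent/child operation accounting added in this commit.
    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicLong;

    public class OperationTrackingSketch
    {
        static class Operation
        {
            String desc;
            boolean isActive = true;
            AtomicLong recordsRead = new AtomicLong(0);
            AtomicLong bytesRead = new AtomicLong(0);
            List<Operation> children = new ArrayList<>();

            Operation(String desc) { this.desc = desc; }

            Operation startChild(String childDesc)
            {
                Operation child = new Operation(childDesc);
                synchronized (children) { children.add(child); } // worker threads may add concurrently
                return child;
            }

            void end()
            {
                isActive = false;
                // Summarize children, warning about any that never completed,
                // in the spirit of TaskOperation.end() in the diff.
                synchronized (children)
                {
                    for (Operation child : children)
                    {
                        if (child.isActive)
                            System.out.println("WARN: child \"" + child.desc + "\" did not complete");
                        System.out.println(child.desc + ": records=" + child.recordsRead.get()
                                + ", bytes=" + child.bytesRead.get());
                    }
                }
            }
        }

        public static void main(String[] args)
        {
            Operation read = new Operation("Read file");

            Operation part1 = read.startChild("File Part: 1"); // one child per file part
            part1.recordsRead.addAndGet(1000);
            part1.bytesRead.addAndGet(64_000);
            part1.isActive = false;                            // child finished

            read.recordsRead.addAndGet(1000);                  // parent keeps aggregate totals too
            read.bytesRead.addAndGet(64_000);
            read.end();
        }
    }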
@@ -192,6 +192,43 @@ public final void testGetProjectedRecordDefinition() throws HpccFileException
Assert.assertEquals(3, projectedRecordDefinition.getNumDefs());
}

private boolean fieldDefinitionsAreSeparate(FieldDef fieldDef1, FieldDef fieldDef2)
{
for (int i = 0; i < fieldDef1.getNumDefs(); i++)
{
if (fieldDef1.getDef(i) == fieldDef2.getDef(i))
{
return false;
}

if (!fieldDefinitionsAreSeparate(fieldDef1.getDef(i), fieldDef2.getDef(i)))
{
return false;
}
}

return true;
}

@Test
public final void testProjectedRecordDefCloning() throws Exception
{
FieldDef recordDef = mockHPCCFile.getRecordDefinition();

String[] fieldNames = new String[recordDef.getNumDefs()];
for (int i = 0; i < recordDef.getNumDefs(); i++)
{
fieldNames[i] = recordDef.getDef(i).getFieldName();
}

String projectList = String.join(",", fieldNames);
mockHPCCFile.setProjectList(projectList);
FieldDef projectedRecordDefinition = mockHPCCFile.getProjectedRecordDefinition();

// Ensure the projected record definition is a clone and not modifying the original record definition
assert(fieldDefinitionsAreSeparate(recordDef, projectedRecordDefinition));
}

@Test
public final void testIsIndex()
{
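The new testProjectedRecordDefCloning builds a project list from every field name and then asserts with fieldDefinitionsAreSeparate() that no sub-definition object is shared between the original and projected definitions; because the walk uses reference equality (==), any shared child reveals a shallow copy. A small hypothetical illustration of that recursive identity check, using a toy Node class rather than the real FieldDef:

    // Toy version of the recursive identity check: '==' on nested objects
    // detects shared (non-cloned) sub-definitions, where equals() would not.
    import java.util.Arrays;
    import java.util.List;

    public class SeparateTreeCheck
    {
        static class Node
        {
            List<Node> children;
            Node(Node... kids) { children = Arrays.asList(kids); }
        }

        // Assumes both trees have the same shape, as in the test.
        static boolean separate(Node a, Node b)
        {
            for (int i = 0; i < a.children.size(); i++)
            {
                if (a.children.get(i) == b.children.get(i)) return false; // shared reference => shallow copy
                if (!separate(a.children.get(i), b.children.get(i))) return false;
            }
            return true;
        }

        public static void main(String[] args)
        {
            Node shared = new Node();
            Node original = new Node(shared);
            Node shallow = new Node(shared);      // reuses the same child object
            Node deep = new Node(new Node());     // fresh child objects

            System.out.println(separate(original, shallow)); // false
            System.out.println(separate(original, deep));    // true
        }
    }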
