From 0b3fac68453df3c5481b1554c080f56765faf9d6 Mon Sep 17 00:00:00 2001 From: Jeremy Clements Date: Tue, 10 Jan 2023 15:17:52 -0500 Subject: [PATCH 01/21] HPCC-27859 Convert remaining useGrid to useFluentGrid Signed-off-by: Jeremy Clements --- .../src-react/components/PackageMapParts.tsx | 94 +++++++++---------- .../components/forms/AddPackageMapPart.tsx | 11 +-- esp/src/src/nls/hpcc.ts | 2 +- 3 files changed, 47 insertions(+), 60 deletions(-) diff --git a/esp/src/src-react/components/PackageMapParts.tsx b/esp/src/src-react/components/PackageMapParts.tsx index 921ace82339..111d712b261 100644 --- a/esp/src/src-react/components/PackageMapParts.tsx +++ b/esp/src/src-react/components/PackageMapParts.tsx @@ -1,15 +1,12 @@ import * as React from "react"; -import { CommandBar, ContextualMenuItemType, ICommandBarItemProps, MessageBar, MessageBarType } from "@fluentui/react"; -import { useConst } from "@fluentui/react-hooks"; +import { CommandBar, ContextualMenuItemType, ICommandBarItemProps, Link } from "@fluentui/react"; import { scopedLogger } from "@hpcc-js/util"; import { SizeMe } from "react-sizeme"; import * as parser from "dojox/xml/parser"; -import * as Observable from "dojo/store/Observable"; -import { Memory } from "src/store/Memory"; import * as WsPackageMaps from "src/WsPackageMaps"; import nlsHPCC from "src/nlsHPCC"; import { useConfirm } from "../hooks/confirm"; -import { useGrid } from "../hooks/grid"; +import { useFluentGrid } from "../hooks/grid"; import { pushUrl } from "../util/history"; import { ShortVerticalDivider } from "./Common"; import { AddPackageMapPart } from "./forms/AddPackageMapPart"; @@ -33,14 +30,12 @@ export const PackageMapParts: React.FunctionComponent = ({ const [_package, setPackage] = React.useState(undefined); const [showAddPartForm, setShowAddPartForm] = React.useState(false); const [uiState, setUIState] = React.useState({ ...defaultUIState }); - - const [showError, setShowError] = React.useState(false); - const [errorMessage, 
setErrorMessage] = React.useState(""); + const [data, setData] = React.useState([]); // Grid --- - const store = useConst(new Observable(new Memory("Part"))); - const { Grid, selection, refreshTable, copyButtons } = useGrid({ - store, + const { Grid, selection, copyButtons } = useFluentGrid({ + data, + primaryID: "Part", sort: { attribute: "Part", descending: false }, filename: "packageMapParts", columns: { @@ -48,44 +43,60 @@ export const PackageMapParts: React.FunctionComponent = ({ Part: { label: nlsHPCC.Parts, formatter: React.useCallback(function (part, row) { - return `${part}`; + return {part}; }, [name]) }, } }); + const refreshData = React.useCallback(() => { + WsPackageMaps.getPackageMapById({ packageMap: name }) + .then(({ GetPackageMapByIdResponse }) => { + const xml = parser.parse(GetPackageMapByIdResponse?.Info); + const parts = [...xml.getElementsByTagName("Part")].map(part => { + return { + Part: part.attributes[0].nodeValue + }; + }); + setData(parts); + }) + .catch(err => logger.error(err)) + ; + }, [name]); + + React.useEffect(() => { + refreshData(); + }, [refreshData]); + const [DeleteConfirm, setShowDeleteConfirm] = useConfirm({ title: nlsHPCC.Delete, message: nlsHPCC.YouAreAboutToDeleteThisPart, onSubmit: React.useCallback(() => { + const requests = []; selection.forEach((item, idx) => { - WsPackageMaps.RemovePartFromPackageMap({ - request: { - PackageMap: name.split("::")[1], - Target: _package?.Target, - PartName: item.Part - } - }) - .then(({ RemovePartFromPackageMapResponse, Exceptions }) => { - if (RemovePartFromPackageMapResponse?.status?.Code === 0) { - store.remove(item.Part); - refreshTable(); - } else if (Exceptions?.Exception.length > 0) { - setShowError(true); - setErrorMessage(Exceptions?.Exception[0].Message); + requests.push( + WsPackageMaps.RemovePartFromPackageMap({ + request: { + PackageMap: name.split("::")[1], + Target: _package?.Target, + PartName: item.Part } }) + ); + Promise + .all(requests) + .then(() => 
refreshData()) .catch(err => logger.error(err)) ; }); - }, [_package?.Target, name, refreshTable, selection, store]) + }, [_package?.Target, name, refreshData, selection]) }); // Command Bar --- const buttons = React.useMemo((): ICommandBarItemProps[] => [ { key: "refresh", text: nlsHPCC.Refresh, iconProps: { iconName: "Refresh" }, - onClick: () => refreshTable() + onClick: () => refreshData() }, { key: "divider_1", itemType: ContextualMenuItemType.Divider, onRender: () => }, { @@ -109,23 +120,7 @@ export const PackageMapParts: React.FunctionComponent = ({ } } }, - ], [name, refreshTable, selection, setShowDeleteConfirm, uiState.hasSelection]); - - React.useEffect(() => { - WsPackageMaps.getPackageMapById({ packageMap: name }) - .then(({ GetPackageMapByIdResponse }) => { - const xml = parser.parse(GetPackageMapByIdResponse?.Info); - const parts = [...xml.getElementsByTagName("Part")].map(part => { - return { - Part: part.attributes[0].nodeValue - }; - }); - store.setData(parts); - refreshTable(); - }) - .catch(err => logger.error(err)) - ; - }, [store, name, refreshTable]); + ], [name, refreshData, selection, setShowDeleteConfirm, uiState.hasSelection]); React.useEffect(() => { WsPackageMaps.PackageMapQuery({}) @@ -148,11 +143,6 @@ export const PackageMapParts: React.FunctionComponent = ({ }, [selection]); return <> - {showError && - setShowError(false)} dismissButtonAriaLabel="Close"> - {errorMessage} - - } {({ size }) => } @@ -162,8 +152,8 @@ export const PackageMapParts: React.FunctionComponent = ({ /> } ; diff --git a/esp/src/src-react/components/forms/AddPackageMapPart.tsx b/esp/src/src-react/components/forms/AddPackageMapPart.tsx index 7fdc1813044..cae4d80a7c1 100644 --- a/esp/src/src-react/components/forms/AddPackageMapPart.tsx +++ b/esp/src/src-react/components/forms/AddPackageMapPart.tsx @@ -37,19 +37,17 @@ const defaultValues: AddPackageMapPartValues = { interface AddPackageMapPartProps { showForm: boolean; setShowForm: (_: boolean) => void; - store: any; 
packageMap: string; target: string; - refreshTable: (_: boolean) => void; + refreshData: () => void; } export const AddPackageMapPart: React.FunctionComponent = ({ showForm, setShowForm, - store, packageMap, target, - refreshTable, + refreshData, }) => { const { handleSubmit, control, reset } = useForm({ defaultValues }); @@ -66,8 +64,7 @@ export const AddPackageMapPart: React.FunctionComponent .then(({ AddPartToPackageMapResponse, Exceptions }) => { if (AddPartToPackageMapResponse?.status?.Code === 0) { closeForm(); - store.add({ Part: data.PartName }); - refreshTable(true); + if (refreshData) refreshData(); reset(defaultValues); } else if (Exceptions) { closeForm(); @@ -81,7 +78,7 @@ export const AddPackageMapPart: React.FunctionComponent logger.error(err); } )(); - }, [closeForm, handleSubmit, packageMap, refreshTable, reset, store, target]); + }, [closeForm, handleSubmit, packageMap, refreshData, reset, target]); return diff --git a/esp/src/src/nls/hpcc.ts b/esp/src/src/nls/hpcc.ts index f7a51fc4ba7..599c5548846 100644 --- a/esp/src/src/nls/hpcc.ts +++ b/esp/src/src/nls/hpcc.ts @@ -354,7 +354,7 @@ export = { GetLogicalFilePart: "Logical File Part", GetProtectedList: "Protected List", GetValue: "Value", - GetPart: "Part", + GetPart: "Get Part", GetSoftwareInformation: "Get Software Information", Graph: "Graph", Graphs: "Graphs", From 095abecdfe518c73067d4e63e8048f767ebca03b Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Wed, 18 Jan 2023 14:45:56 +0000 Subject: [PATCH 02/21] HPCC-28794 Docker build image hangs on some machines Not sure of the cause or why my machine is affected, but there are similar reports: https://www.google.com/search?q=docker+build+image+hangs+keyserver Signed-off-by: Gordon Smith --- dockerfiles/platform-build/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dockerfiles/platform-build/Dockerfile b/dockerfiles/platform-build/Dockerfile index 4cd079a42bd..67843652c1e 100644 --- 
a/dockerfiles/platform-build/Dockerfile +++ b/dockerfiles/platform-build/Dockerfile @@ -25,7 +25,7 @@ FROM ${CR_REPO}/${CR_USER}/${CR_CONTAINER_NAME}:${BASE_VER} RUN apt-get update RUN apt-get install -y dirmngr gnupg apt-transport-https ca-certificates software-properties-common -RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF RUN apt-add-repository 'deb https://download.mono-project.com/repo/ubuntu stable-focal main' RUN apt install -y mono-complete groff-base From eec5871eb5e09c1a5ebcdc25e0850d9dc963c85b Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Wed, 18 Jan 2023 18:07:07 +0000 Subject: [PATCH 03/21] Split off 8.6.72 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 13 files changed, 23 insertions(+), 23 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index f051dbb6a47..90fbea1e38f 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 8.6.71-closedown0 +version: 8.6.73-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 8.6.71-closedown0 +appVersion: 8.6.73-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 20e1974132b..13f1256f605 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1180,7 +1180,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} {{- end }} diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index 34953590be6..e5fcdc6c62f 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -83,7 +83,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 0bddd2e25b4..3dd306bd996 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -55,7 +55,7 @@ spec: labels: run: {{ .name | quote }} accessDali: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index c7b2b970c4a..61bc1a6d73c 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -57,7 +57,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -131,7 +131,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index e579f1ef93e..eebb592e42f 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -56,7 +56,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -140,7 +140,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index 6ea633465e5..bf372324d6d 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index 4db02f414b0..e87aca0a603 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -114,7 +114,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 8db416b67ea..8ecea1ac585 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -68,7 +68,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index 6ee90e2b986..c0e839a8333 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -115,7 +115,7 @@ spec: labels: run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -175,7 +175,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 spec: ports: - port: {{ $commonCtx.topoport }} @@ -234,7 +234,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} 
@@ -335,7 +335,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index ff3ca7651c5..290ebd7096f 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 7c01fed755c..ac3b9cede4a 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -148,7 +148,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- if hasKey $thorScope "labels" }} @@ -215,7 +215,7 @@ data: accessEsp: "true" app: "thor" component: "thorworker" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- if hasKey $thorScope "labels" }} @@ -348,7 +348,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- if hasKey $commonCtx.me "labels" }} {{ toYaml 
$commonCtx.me.labels | indent 8 }} @@ -409,7 +409,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 8.6.71-closedown0 + helmVersion: 8.6.73-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- if hasKey $commonCtx.me "labels" }} {{ toYaml $commonCtx.me.labels | indent 8 }} diff --git a/version.cmake b/version.cmake index 1f3eb834053..e95ca852fcb 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 8 ) set ( HPCC_MINOR 6 ) -set ( HPCC_POINT 71 ) +set ( HPCC_POINT 73 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From d3076e2c120d50de4bb802b28baf07ad752c0261 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Wed, 18 Jan 2023 18:16:16 +0000 Subject: [PATCH 04/21] HPCC-28753 Remove parameter to go_rc that causes problems Signed-off-by: Gavin Halliday --- tools/tagging/gorc.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/tagging/gorc.sh b/tools/tagging/gorc.sh index b7330672349..62e13e19ba6 100755 --- a/tools/tagging/gorc.sh +++ b/tools/tagging/gorc.sh @@ -46,5 +46,5 @@ read -n 1 -s for f in $all ; do cd $gitroot/$f echo "Process $f" - $hpccdir/cmake_modules/go_rc.sh $* + $hpccdir/cmake_modules/go_rc.sh done From 861a7f115dab3d51ea9f430b4559f4c3e9b76562 Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Wed, 18 Jan 2023 18:16:19 +0000 Subject: [PATCH 05/21] Split off 8.8.36 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 
+++++----- version.cmake | 2 +- 13 files changed, 23 insertions(+), 23 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index b3490cffc8a..e09f99947f3 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 8.8.35-closedown0 +version: 8.8.37-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 8.8.35-closedown0 +appVersion: 8.8.37-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index fd71638cbe5..4e0e7093cc2 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1185,7 +1185,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index c5adeacf0a9..55ff48c2fe7 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -81,7 +81,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index 7cabcf3d81b..6a86281c8be 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ 
"component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index 483d24c7daa..d43a2e96d63 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -133,7 +133,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 716ff4382d3..da8b736220f 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName ) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index 78e2faf143b..30a9a96aad1 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index aed6f296ff1..0fda15c0e53 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -116,7 +116,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index e4aeb8e003e..55ee262302b 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -68,7 +68,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index 4cebeca6c2f..3d566915941 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -116,7 +116,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -176,7 +176,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -236,7 +236,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -338,7 +338,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index 5ab240c3e8c..13c56792cea 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: {{ 
$serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 59806571a28..01fe9556a96 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -149,7 +149,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName ) | indent 12 }} @@ -217,7 +217,7 @@ data: accessEsp: "true" app: "thor" component: "thorworker" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName ) | indent 12 }} @@ -350,7 +350,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 8.8.35-closedown0 + helmVersion: 8.8.37-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} @@ -412,7 +412,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 8.8.35-closedown0 + 
helmVersion: 8.8.37-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index b2b8fe4528f..6829a3d041f 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 8 ) set ( HPCC_MINOR 8 ) -set ( HPCC_POINT 35 ) +set ( HPCC_POINT 37 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From 69cbe09e764d9a5ae8febcb96862d90b3fd2ba0e Mon Sep 17 00:00:00 2001 From: Gavin Halliday Date: Wed, 18 Jan 2023 18:20:19 +0000 Subject: [PATCH 06/21] Split off 8.10.18 Signed-off-by: Gavin Halliday --- helm/hpcc/Chart.yaml | 4 ++-- helm/hpcc/templates/_helpers.tpl | 2 +- helm/hpcc/templates/dafilesrv.yaml | 2 +- helm/hpcc/templates/dali.yaml | 2 +- helm/hpcc/templates/dfuserver.yaml | 2 +- helm/hpcc/templates/eclagent.yaml | 4 ++-- helm/hpcc/templates/eclccserver.yaml | 4 ++-- helm/hpcc/templates/eclscheduler.yaml | 2 +- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/templates/localroxie.yaml | 2 +- helm/hpcc/templates/roxie.yaml | 8 ++++---- helm/hpcc/templates/sasha.yaml | 2 +- helm/hpcc/templates/thor.yaml | 10 +++++----- version.cmake | 2 +- 14 files changed, 24 insertions(+), 24 deletions(-) diff --git a/helm/hpcc/Chart.yaml b/helm/hpcc/Chart.yaml index 38116098f39..407bb2faab6 100644 --- a/helm/hpcc/Chart.yaml +++ b/helm/hpcc/Chart.yaml @@ -6,9 +6,9 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 8.10.17-closedown0 +version: 8.10.19-closedown0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 8.10.17-closedown0 +appVersion: 8.10.19-closedown0 diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 8a2ca8145a6..5de1f167d5a 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1194,7 +1194,7 @@ kind: Service metadata: name: {{ $lvars.serviceName | quote }} labels: - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $.root "instance" $lvars.serviceName ) | indent 4 }} {{- if $lvars.labels }} {{ toYaml $lvars.labels | indent 4 }} diff --git a/helm/hpcc/templates/dafilesrv.yaml b/helm/hpcc/templates/dafilesrv.yaml index 4de69fb046e..4d77f84e6df 100644 --- a/helm/hpcc/templates/dafilesrv.yaml +++ b/helm/hpcc/templates/dafilesrv.yaml @@ -50,7 +50,7 @@ spec: labels: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dafilesrv" "name" "dafilesrv" "instance" .name) | indent 8 }} server: {{ .name | quote }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 annotations: checksum/config: {{ $configSHA }} spec: diff --git a/helm/hpcc/templates/dali.yaml b/helm/hpcc/templates/dali.yaml index bb27d0a99dc..98441e8f373 100644 --- a/helm/hpcc/templates/dali.yaml +++ b/helm/hpcc/templates/dali.yaml @@ -81,7 +81,7 @@ spec: run: {{ $dali.name | quote }} server: {{ $dali.name | quote }} app: dali - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} {{- end }} diff --git a/helm/hpcc/templates/dfuserver.yaml b/helm/hpcc/templates/dfuserver.yaml index d5a908c699c..f04ab1fad56 100644 --- a/helm/hpcc/templates/dfuserver.yaml +++ b/helm/hpcc/templates/dfuserver.yaml @@ -56,7 +56,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "dfuserver" "name" "dfuserver" "instance" .name) | indent 8 }} run: {{ .name | quote }} accessDali: "yes" - 
helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclagent.yaml b/helm/hpcc/templates/eclagent.yaml index dff86cd366e..9997f95338f 100644 --- a/helm/hpcc/templates/eclagent.yaml +++ b/helm/hpcc/templates/eclagent.yaml @@ -58,7 +58,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" $apptype "name" "eclagent" "instance" $appJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -135,7 +135,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclccserver.yaml b/helm/hpcc/templates/eclccserver.yaml index 173ec435756..aea94482dc0 100644 --- a/helm/hpcc/templates/eclccserver.yaml +++ b/helm/hpcc/templates/eclccserver.yaml @@ -57,7 +57,7 @@ data: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclccserver" "name" "eclccserver" "instance" $compileJobName "instanceOf" (printf "%s-job" .me.name)) | indent 12 }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} {{- end }} @@ -142,7 +142,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: {{ .useChildProcesses | default false | ternary "yes" "no" | quote }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/eclscheduler.yaml b/helm/hpcc/templates/eclscheduler.yaml index fc667c26eff..3a74ba43f87 100644 --- a/helm/hpcc/templates/eclscheduler.yaml +++ b/helm/hpcc/templates/eclscheduler.yaml @@ -64,7 +64,7 @@ spec: run: {{ .name | quote }} accessDali: "yes" accessEsp: "no" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey . "labels" }} {{ toYaml .labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index c40131f6491..4724b5a4bf1 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -116,7 +116,7 @@ spec: server: {{ .name | quote }} accessDali: "yes" app: {{ $application }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "name" $application "component" "esp" "instance" .name) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8 }} diff --git a/helm/hpcc/templates/localroxie.yaml b/helm/hpcc/templates/localroxie.yaml index 842fd430127..3df414376a4 100644 --- a/helm/hpcc/templates/localroxie.yaml +++ b/helm/hpcc/templates/localroxie.yaml @@ -68,7 +68,7 @@ spec: server: {{ $servername | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $roxie.name) | indent 8 }} {{- if hasKey . 
"labels" }} {{ toYaml .labels | indent 8 }} diff --git a/helm/hpcc/templates/roxie.yaml b/helm/hpcc/templates/roxie.yaml index da7c3a84263..98c4e5e13ab 100644 --- a/helm/hpcc/templates/roxie.yaml +++ b/helm/hpcc/templates/roxie.yaml @@ -116,7 +116,7 @@ spec: {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 8 }} run: {{ $commonCtx.toponame | quote }} roxie-cluster: {{ $roxie.name | quote }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} @@ -176,7 +176,7 @@ kind: Service metadata: name: {{ $commonCtx.toponame | quote }} labels: - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "topology-server" "name" "roxie" "instance" $commonCtx.toponame) | indent 4 }} spec: ports: @@ -238,7 +238,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "roxie-server" "name" "roxie" "instance" $servername) | indent 8 }} {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} @@ -341,7 +341,7 @@ spec: roxie-cluster: {{ $roxie.name | quote }} accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey $.Values.global "metrics" }} {{- include "hpcc.generateMetricsReporterLabel" $.Values.global.metrics | nindent 8}} {{- end }} diff --git a/helm/hpcc/templates/sasha.yaml b/helm/hpcc/templates/sasha.yaml index e119806ba1a..6854c7ac34b 100644 --- a/helm/hpcc/templates/sasha.yaml +++ b/helm/hpcc/templates/sasha.yaml @@ -52,7 +52,7 @@ spec: run: {{ $serviceName | quote }} server: 
{{ $serviceName | quote }} accessDali: {{ (has "dali" $sasha.access) | ternary "yes" "no" | quote }} - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- if hasKey $sasha "labels" }} {{ toYaml $sasha.labels | indent 8 }} {{- end }} diff --git a/helm/hpcc/templates/thor.yaml b/helm/hpcc/templates/thor.yaml index 699a64e6070..52b3ced23d1 100644 --- a/helm/hpcc/templates/thor.yaml +++ b/helm/hpcc/templates/thor.yaml @@ -82,7 +82,7 @@ data: labels: accessDali: "yes" accessEsp: "yes" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $eclAgentJobName "instanceOf" (printf "%s-job" .eclAgentName)) | indent 8 }} {{- if hasKey .me "labels" }} {{ toYaml .me.labels | indent 12 }} @@ -149,7 +149,7 @@ data: accessEsp: "yes" app: "thor" component: "thormanager" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thormanager" "name" "thor" "instance" $thorManagerJobName "instanceOf" (printf "%s-thormanager-job" .me.name)) | indent 12 }} @@ -218,7 +218,7 @@ data: accessEsp: "yes" app: "thor" component: "thorworker" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 instance: "_HPCC_JOBNAME_" job: "_HPCC_JOBNAME_" {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "thorworker" "name" "thor" "instance" $thorWorkerJobName "instanceOf" (printf "%s-thorworker-job" .me.name)) | indent 12 }} @@ -351,7 +351,7 @@ spec: accessEsp: {{ $commonCtx.eclAgentUseChildProcesses | ternary "yes" "no" | quote }} app: "thor" component: "thor-eclagent" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 instance: {{ $commonCtx.eclAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.eclAgentName ) | indent 8 }} {{- if 
hasKey $commonCtx.me "labels" }} @@ -416,7 +416,7 @@ spec: accessEsp: "no" app: "thor" component: "thor-thoragent" - helmVersion: 8.10.17-closedown0 + helmVersion: 8.10.19-closedown0 instance: {{ $commonCtx.thorAgentName | quote }} {{- include "hpcc.addStandardLabels" (dict "root" $ "component" "eclagent" "name" "thor" "instance" $commonCtx.thorAgentName ) | indent 8 }} {{- if hasKey $commonCtx.me "labels" }} diff --git a/version.cmake b/version.cmake index 7bf8df6a8da..2e917a850ab 100644 --- a/version.cmake +++ b/version.cmake @@ -5,7 +5,7 @@ set ( HPCC_NAME "Community Edition" ) set ( HPCC_PROJECT "community" ) set ( HPCC_MAJOR 8 ) set ( HPCC_MINOR 10 ) -set ( HPCC_POINT 17 ) +set ( HPCC_POINT 19 ) set ( HPCC_MATURITY "closedown" ) set ( HPCC_SEQUENCE 0 ) ### From a8a09913474bc347b6f3a71afa8ed9a8ee991d84 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Wed, 18 Jan 2023 19:41:26 +0000 Subject: [PATCH 07/21] HPCC-28800 Disable release notes creation Currently they exceed a max len, which prevents the build Signed-off-by: Gordon Smith --- .github/workflows/build-assets.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 7d4a052ad07..40a553fa45d 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -27,7 +27,7 @@ jobs: - name: Release HPCC-Platform uses: ncipollo/release-action@v1.12.0 with: - generateReleaseNotes: true + generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} - name: Release LN uses: ncipollo/release-action@v1.12.0 @@ -35,7 +35,7 @@ jobs: owner: ${{ secrets.LNB_ACTOR }} repo: LN token: ${{ secrets.LNB_TOKEN }} - generateReleaseNotes: true + generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} From 4fc65dedfcacbde22ea192e614d78c4008b64e8b Mon Sep 17 00:00:00 2001 From: wangkx Date: Fri, 4 Nov 2022 15:43:29 -0400 Subject: [PATCH 08/21] HPCC-28553 Check dropzone scope access 
in ESP FileSpray 1. Add getDropZoneScopePermissions() to CDistributedFileDirectory. 2. Add getDropZoneScopePermissions() to the ESP SMC lib. 3. Check dropzone scope access using the getDropZoneScopePermissions(). Revise bassed on review 1. Change the variable names from isExternalPlane to isPlane. 2. Add more information to the error messages. 3. Check isDir before checking scope access for the files in subfolders. 4. Remove a comment. 5. Remove the extra newline. 6. When searching dropzone files, do my own recursion into subdirs and check the scope permissions. 7. When deleting given dropzone files, add the code to check the scope permissions for every given files. 8. Rename one isExternalPlane to isPlane. 9. Remove one unusued param in getDropZoneScopePermissions() and change the related code for calling the getDropZoneScopePermissions(). 10. Remove the extra fname length checks. 11. Remove the unused pathSep. 12. Pass in nullptr for the relDir. 13. Change the code to use getSecAccessFlagName(). 14. Clean the checkDropZoneFileScopeAccess(). More changes based on review 1. Unify the getDropZoneScopePermissions() in the dali base lib to only accept dropzone name. 2. Add getPlaneHosts() to the dali base lib. 3. Remove the makeDropZoneScope() and getDropZoneInfoByIP(). 4. Unify the onDespray() using the findDropZonePlane(). 5. Add findDropZonePlaneName() to find out dropzone plane name from dropzone host and path using findDropZonePlane(). 6. Modify the getDropZoneScopePermissions() in the SMC lib to support the getDropZoneScopePermissions() in the dali base lib. 7. Modify the getDropZoneScopePermissions() calls in: CFileSpraySoapBindingEx::downloadFile CFileSpraySoapBindingEx::onStartUpload CFileSprayEx::onFileList CFileSprayEx::onDropZoneFileSearch CFileSprayEx::onDropZoneFiles CFileSprayEx::onDeleteDropZoneFiles 8. Modify onFileList() to allow the host not been specified (retrieve the files from all of the hosts in a dropzone). 9. 
Modify the getDropZoneFiles() so that it can be used by multiple functions. 10. In thee searchDropZoneFiles(), do not use the nameFilter when calling the directoryFiles(). 11. Revise the getPlaneHostGroup(). 12. Remove the misleading 'true' when calling getScopes(). 13. Change the code to call the splitDirTail(). 14. Fix a bug: when a dropzone host is not requested in the FileSpray.FileList (the method will report the files in all hosts in a dropzone), the validateDropZonePath() should be called for all hosts. The code should not check the log access. 15. Use localhost in FileList when no hostGroup and hosts settings. 16. Revise getPlaneHostGroup() based on review. 17. Add user ID and allowed permission to error messages based on review. Signed-off-by: wangkx --- dali/base/dadfs.cpp | 10 + dali/base/dadfs.hpp | 1 + dali/base/dautils.cpp | 39 +-- dali/base/dautils.hpp | 1 + esp/scm/ws_fs.ecm | 3 +- esp/services/ws_fs/ws_fsBinding.cpp | 14 +- esp/services/ws_fs/ws_fsService.cpp | 402 ++++++++++++---------------- esp/services/ws_fs/ws_fsService.hpp | 9 +- esp/smc/SMCLib/TpCommon.cpp | 19 ++ esp/smc/SMCLib/TpWrapper.hpp | 2 + 10 files changed, 246 insertions(+), 254 deletions(-) diff --git a/dali/base/dadfs.cpp b/dali/base/dadfs.cpp index b0433e08910..a5b534ee1d4 100644 --- a/dali/base/dadfs.cpp +++ b/dali/base/dadfs.cpp @@ -1136,6 +1136,7 @@ protected: friend class CDistributedFile; SecAccessFlags getFilePermissions(const char *lname,IUserDescriptor *user,unsigned auditflags); SecAccessFlags getNodePermissions(const IpAddress &ip,IUserDescriptor *user,unsigned auditflags); SecAccessFlags getFDescPermissions(IFileDescriptor *,IUserDescriptor *user,unsigned auditflags=0); + SecAccessFlags getDropZoneScopePermissions(const char *dropZoneName,const char *dropZonePath,IUserDescriptor *user,unsigned auditflags=0); void setDefaultUser(IUserDescriptor *user); IUserDescriptor* queryDefaultUser(); @@ -11776,6 +11777,15 @@ SecAccessFlags 
CDistributedFileDirectory::getFDescPermissions(IFileDescriptor *f return retPerms; } +SecAccessFlags CDistributedFileDirectory::getDropZoneScopePermissions(const char *dropZoneName,const char *dropZonePath,IUserDescriptor *user,unsigned auditflags) +{ + CDfsLogicalFileName dlfn; + dlfn.setPlaneExternal(dropZoneName,dropZonePath); + StringBuffer scopes; + dlfn.getScopes(scopes); + return getScopePermissions(scopes,user,auditflags); +} + void CDistributedFileDirectory::setDefaultUser(IUserDescriptor *user) { if (user) diff --git a/dali/base/dadfs.hpp b/dali/base/dadfs.hpp index 20c9b20066d..6b82849f4c5 100644 --- a/dali/base/dadfs.hpp +++ b/dali/base/dadfs.hpp @@ -646,6 +646,7 @@ interface IDistributedFileDirectory: extends IInterface virtual IUserDescriptor* queryDefaultUser()=0; virtual SecAccessFlags getNodePermissions(const IpAddress &ip,IUserDescriptor *user,unsigned auditflags=0)=0; virtual SecAccessFlags getFDescPermissions(IFileDescriptor *,IUserDescriptor *user,unsigned auditflags=0)=0; + virtual SecAccessFlags getDropZoneScopePermissions(const char *dropZoneName,const char *dropZonePath,IUserDescriptor *user,unsigned auditflags=0)=0; virtual DistributedFileCompareResult fileCompare(const char *lfn1,const char *lfn2,DistributedFileCompareMode mode,StringBuffer &errstr,IUserDescriptor *user)=0; virtual bool filePhysicalVerify(const char *lfn1,IUserDescriptor *user,bool includecrc,StringBuffer &errstr)=0; diff --git a/dali/base/dautils.cpp b/dali/base/dautils.cpp index a9dea004e79..99b43ddd819 100644 --- a/dali/base/dautils.cpp +++ b/dali/base/dautils.cpp @@ -46,18 +46,20 @@ #define SDS_CONNECT_TIMEOUT (1000*60*60*2) // better than infinite #define MIN_REDIRECTION_LOAD_INTERVAL 1000 +static IPropertyTree *getPlaneHostGroup(IPropertyTree *plane) +{ + if (plane->hasProp("@hostGroup")) + return getHostGroup(plane->queryProp("@hostGroup"), true); + else if (plane->hasProp("hosts")) + return LINK(plane); // plane itself holds 'hosts' + return nullptr; +} bool 
isHostInPlane(IPropertyTree *plane, const char *host, bool ipMatch) { - Owned planeGroup; - if (plane->hasProp("@hostGroup")) - planeGroup.setown(getHostGroup(plane->queryProp("@hostGroup"), true)); - else - { - if (!plane->hasProp("hosts")) - return false; - planeGroup.set(plane); // plane itself holds 'hosts' - } + Owned planeGroup = getPlaneHostGroup(plane); + if (!planeGroup) + return false; Owned hostsIter = planeGroup->getElements("hosts"); SocketEndpoint hostEp; if (ipMatch) @@ -79,12 +81,8 @@ bool isHostInPlane(IPropertyTree *plane, const char *host, bool ipMatch) bool getPlaneHost(StringBuffer &host, IPropertyTree *plane, unsigned which) { - Owned hostGroup; - if (plane->hasProp("@hostGroup")) - hostGroup.setown(getHostGroup(plane->queryProp("@hostGroup"), true)); - else if (plane->hasProp("hosts")) - hostGroup.set(plane); // the plane holds the "hosts" - else + Owned hostGroup = getPlaneHostGroup(plane); + if (!hostGroup) return false; if (which >= hostGroup->getCount("hosts")) @@ -94,6 +92,17 @@ bool getPlaneHost(StringBuffer &host, IPropertyTree *plane, unsigned which) return true; } +void getPlaneHosts(StringArray &hosts, IPropertyTree *plane) +{ + Owned hostGroup = getPlaneHostGroup(plane); + if (hostGroup) + { + Owned hostsIter = hostGroup->getElements("hosts"); + ForEach (*hostsIter) + hosts.append(hostsIter->query().queryProp(nullptr)); + } +} + constexpr const char * lz_plane_path = "storage/planes[@category='lz']"; IPropertyTreeIterator * getDropZonePlanesIterator(const char * name) diff --git a/dali/base/dautils.hpp b/dali/base/dautils.hpp index 75e850b48e4..d3a25edf177 100644 --- a/dali/base/dautils.hpp +++ b/dali/base/dautils.hpp @@ -545,6 +545,7 @@ extern da_decl IPropertyTree * getDropZonePlane(const char * name); extern da_decl IPropertyTree * findDropZonePlane(const char * path, const char * host, bool ipMatch); extern da_decl bool isHostInPlane(IPropertyTree *plane, const char *host, bool ipMatch); extern da_decl bool 
getPlaneHost(StringBuffer &host, IPropertyTree *plane, unsigned which); +extern da_decl void getPlaneHosts(StringArray &hosts, IPropertyTree *plane); extern da_decl void setPageCacheTimeoutMilliSeconds(unsigned timeoutSeconds); extern da_decl void setMaxPageCacheItems(unsigned _maxPageCacheItems); extern da_decl IRemoteConnection* connectXPathOrFile(const char* path, bool safe, StringBuffer& xpath); diff --git a/esp/scm/ws_fs.ecm b/esp/scm/ws_fs.ecm index 5b1d9bd7645..6b4c9911482 100644 --- a/esp/scm/ws_fs.ecm +++ b/esp/scm/ws_fs.ecm @@ -534,6 +534,7 @@ ESPresponse [exceptions_inline] DFUWUFileResponse ESPrequest FileListRequest { + [min_ver("1.24")] string DropZoneName; string Netaddr; string Path; string Mask; @@ -696,7 +697,7 @@ ESPresponse [exceptions_inline, nil_remove] GetDFUServerQueuesResponse ESPservice [ auth_feature("DEFERRED"), - version("1.23"), + version("1.24"), exceptions_inline("./smc_xslt/exceptions.xslt")] FileSpray { ESPmethod EchoDateTime(EchoDateTime, EchoDateTimeResponse); diff --git a/esp/services/ws_fs/ws_fsBinding.cpp b/esp/services/ws_fs/ws_fsBinding.cpp index e4de73d1344..013bf543d51 100644 --- a/esp/services/ws_fs/ws_fsBinding.cpp +++ b/esp/services/ws_fs/ws_fsBinding.cpp @@ -393,12 +393,17 @@ int CFileSpraySoapBindingEx::downloadFile(IEspContext &context, CHttpRequest* re if (!context.validateFeatureAccess(FILE_SPRAY_URL, SecAccess_Full, false)) throw MakeStringException(ECLWATCH_FILE_SPRAY_ACCESS_DENIED, "Failed to download file. 
Permission denied."); - StringBuffer netAddressStr, osStr, pathStr, nameStr; + StringBuffer netAddressStr, osStr, pathStr, nameStr, dropZoneName; request->getParameter("NetAddress", netAddressStr); request->getParameter("OS", osStr); request->getParameter("Path", pathStr); request->getParameter("Name", nameStr); + request->getParameter("DropZoneName", dropZoneName); + SecAccessFlags permission = getDropZoneScopePermissions(context, dropZoneName, pathStr, netAddressStr); + if (permission < SecAccess_Read) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Access DropZone Scope %s %s %s not allowed for user %s (permission:%s). Read Access Required.", + dropZoneName.str(), netAddressStr.str(), pathStr.str(), context.queryUserId(), getSecAccessFlagName(permission)); #if 0 StringArray files; IProperties* params = request->queryParameters(); @@ -479,11 +484,16 @@ int CFileSpraySoapBindingEx::downloadFile(IEspContext &context, CHttpRequest* re int CFileSpraySoapBindingEx::onStartUpload(IEspContext& ctx, CHttpRequest* request, CHttpResponse* response, const char* serv, const char* method) { - StringBuffer netAddress, path; + StringBuffer netAddress, path, dropZoneName; request->getParameter("NetAddress", netAddress); request->getParameter("Path", path); + request->getParameter("DropZoneName", dropZoneName); if (!validateDropZonePath(nullptr, netAddress, path)) //The path should be the absolute path for the dropzone. throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Invalid Landing Zone path %s", path.str()); + SecAccessFlags permission = getDropZoneScopePermissions(ctx, dropZoneName, path, netAddress); + if (permission < SecAccess_Full) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Access DropZone Scope %s %s %s not allowed for user %s (permission:%s). 
Full Access Required.", + dropZoneName.str(), netAddress.str(), path.str(), ctx.queryUserId(), getSecAccessFlagName(permission)); return EspHttpBinding::onStartUpload(ctx, request, response, serv, method); } diff --git a/esp/services/ws_fs/ws_fsService.cpp b/esp/services/ws_fs/ws_fsService.cpp index 97c61dab6f4..53a9fc7d8fe 100644 --- a/esp/services/ws_fs/ws_fsService.cpp +++ b/esp/services/ws_fs/ws_fsService.cpp @@ -2338,100 +2338,6 @@ void CFileSprayEx::getDropZoneInfoByDestPlane(double clientVersion, const char* getDropZoneHost(destPlane, dropZone, hostip); } -void CFileSprayEx::getDropZoneInfoByIP(double clientVersion, const char* ip, const char* destFileIn, StringBuffer& destFileOut, StringBuffer& umask) -{ -#ifndef _CONTAINERIZED - if (destFileIn && *destFileIn) - destFileOut.set(destFileIn); - - if (!ip || !*ip) - throw MakeStringExceptionDirect(ECLWATCH_INVALID_IP, "Network address must be specified for a drop zone!"); - - Owned factory = getEnvironmentFactory(true); - Owned constEnv = factory->openEnvironment(); - - StringBuffer destFile; - if (isAbsolutePath(destFileIn)) - { - destFile.set(destFileIn); - Owned dropZone = constEnv->getDropZoneByAddressPath(ip, destFile.str()); - if (!dropZone) - { - if (constEnv->isDropZoneRestrictionEnabled()) - throw MakeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "No drop zone configured for '%s' and '%s'. Check your system drop zone configuration.", ip, destFile.str()); - else - { - LOG(MCdebugInfo, unknownJob, "No drop zone configured for '%s' and '%s'. 
Check your system drop zone configuration.", ip, destFile.str()); - return; - } - } - - - SCMStringBuffer directory, maskBuf; - dropZone->getDirectory(directory); - destFileOut.set(destFile.str()); - dropZone->getUMask(maskBuf); - if (maskBuf.length()) - umask.set(maskBuf.str()); - - return; - } - - Owned dropZoneItr = constEnv->getDropZoneIteratorByAddress(ip); - if (dropZoneItr->count() < 1) - { - if (constEnv->isDropZoneRestrictionEnabled()) - throw MakeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "Drop zone not found for network address '%s'. Check your system drop zone configuration.", ip); - else - { - LOG(MCdebugInfo, unknownJob, "Drop zone not found for network address '%s'. Check your system drop zone configuration.", ip); - return; - } - } - - bool dzFound = false; - ForEach(*dropZoneItr) - { - IConstDropZoneInfo& dropZoneInfo = dropZoneItr->query(); - - SCMStringBuffer dropZoneDirectory, dropZoneUMask; - dropZoneInfo.getDirectory(dropZoneDirectory); - dropZoneInfo.getUMask(dropZoneUMask); - if (!dropZoneDirectory.length()) - continue; - - if (!dzFound) - { - dzFound = true; - destFileOut.set(dropZoneDirectory.str()); - addPathSepChar(destFileOut); - destFileOut.append(destFileIn); - if (dropZoneUMask.length()) - umask.set(dropZoneUMask.str()); - } - else - { - if (constEnv->isDropZoneRestrictionEnabled()) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "> 1 drop zones found for network address '%s'.", ip); - else - { - LOG(MCdebugInfo, unknownJob, "> 1 drop zones found for network address '%s'.", ip); - return; - } - } - } - if (!dzFound) - { - if (constEnv->isDropZoneRestrictionEnabled()) - throw MakeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "No valid drop zone found for network address '%s'. Check your system drop zone configuration.", ip); - else - LOG(MCdebugInfo, unknownJob, "No valid drop zone found for network address '%s'. 
Check your system drop zone configuration.", ip); - } -#else - throw makeStringException(-1, "Internal error: CFileSprayEx::getDropZoneInfoByIP should not be called in containerized environment"); -#endif -} - static StringBuffer & expandLogicalAsPhysical(StringBuffer & target, const char * name, const char * separator) { const char * cur = name; @@ -2472,30 +2378,17 @@ bool CFileSprayEx::onDespray(IEspContext &context, IEspDespray &req, IEspDespray MemoryBuffer& dstxml = (MemoryBuffer&)req.getDstxml(); if(dstxml.length() == 0) { -#ifdef _CONTAINERIZED if (isEmptyString(destPlane)) destPlane = req.getDestGroup(); // allow eclwatch to continue providing storage plane as 'destgroup' field if (isEmptyString(destPlane)) { if (destip.isEmpty()) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "Neither destination storage plane or destination IP specified."); - Owned planesIter = getDropZonePlanesIterator(); - ForEach(*planesIter) - { - IPropertyTree &lzPlane = planesIter->query(); - if (isHostInPlane(&lzPlane, destip, true)) - { - destPlane = lzPlane.queryProp("@name"); - break; - } - } - if (isEmptyString(destPlane)) - throw makeStringException(ECLWATCH_INVALID_INPUT, "Destination IP does not match a hosts based storage plane."); + throw makeStringException(ECLWATCH_INVALID_INPUT, "Neither destination storage plane or destination IP specified."); + Owned plane = findDropZonePlane(destPath, destip, true); + if (!plane) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "DropZone Plane not found for host %s path %s.", destip.str(), destPath.str()); + destPlane = plane->queryProp("@name"); } -#else - if (isEmptyString(destPlane) && destip.isEmpty()) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "Destination network IP/storage plane not specified."); -#endif //If the destination filename is not provided, calculate a relative filename from the logical filename if(!destfile || !*destfile) @@ -2526,10 +2419,7 @@ bool CFileSprayEx::onDespray(IEspContext &context, 
IEspDespray &req, IEspDespray if(dstxml.length() == 0) { StringBuffer destfileWithPath, umask; - if (!isEmptyString(destPlane)) - getDropZoneInfoByDestPlane(version, destPlane, destfile, destfileWithPath, umask, destip); - else - getDropZoneInfoByIP(version, destip, destfile, destfileWithPath, umask); + getDropZoneInfoByDestPlane(version, destPlane, destfile, destfileWithPath, umask, destip); RemoteFilename rfn; SocketEndpoint ep(destip.str()); @@ -2922,9 +2812,10 @@ bool CFileSprayEx::onFileList(IEspContext &context, IEspFileListRequest &req, IE throw MakeStringException(ECLWATCH_INVALID_INPUT, "Path not specified."); double version = context.getClientVersion(); + const char* dropZoneName = req.getDropZoneName(); const char* netaddr = req.getNetaddr(); - if (!netaddr || !*netaddr) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "Network address not specified."); + if (isEmptyString(dropZoneName) && isEmptyString(netaddr)) + throw makeStringException(ECLWATCH_INVALID_INPUT, "DropZoneName or Netaddr must be specified."); const char* fileNameMask = req.getMask(); bool directoryOnly = req.getDirectoryOnly(); PROGLOG("FileList: Netaddr %s, Path %s", netaddr, path); @@ -2946,55 +2837,36 @@ bool CFileSprayEx::onFileList(IEspContext &context, IEspFileListRequest &req, IE throw MakeStringException(ECLWATCH_ACCESS_TO_FILE_DENIED, "Only cfg or log file allowed."); } - if (!validateDropZonePath(nullptr, netaddr, sPath) && !validateConfigurationDirectory(nullptr, "log", nullptr, nullptr, sPath)) //The path should be the absolute path for the dropzone or log file. 
- throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Invalid file path %s", sPath.str()); - - RemoteFilename rfn; - SocketEndpoint ep; -#ifdef MACHINE_IP - ep.set(MACHINE_IP); -#else - ep.set(netaddr); - if (ep.isNull()) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "FileList: cannot resolve network IP from %s.", netaddr); -#endif - rfn.setPath(ep, sPath.str()); - Owned f = createIFile(rfn); - if (f->isDirectory()!=fileBool::foundYes) - throw MakeStringException(ECLWATCH_INVALID_DIRECTORY, "%s is not a directory.", path); + if (isEmptyString(dropZoneName)) + dropZoneName = findDropZonePlaneName(sPath, netaddr); + SecAccessFlags permission = getDropZoneScopePermissions(context, dropZoneName, sPath, nullptr); + if (permission < SecAccess_Read) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Access DropZone Scope %s %s not allowed for user %s (permission:%s). Read Access Required.", + dropZoneName, sPath.str(), context.queryUserId(), getSecAccessFlagName(permission)); - IArrayOf files; - Owned di = f->directoryFiles(NULL, false, true); - if(di.get() != NULL) + StringArray hosts; + if (isEmptyString(netaddr)) { - ForEach(*di) - { - StringBuffer fname; - di->getName(fname); - - if (fname.length() == 0 || (directoryOnly && !di->isDir()) || (!di->isDir() && !isEmptyString(fileNameMask) && !WildMatch(fname.str(), fileNameMask, true))) - continue; + Owned dropZone = getDropZonePlane(dropZoneName); + if (!dropZone) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Unknown landing zone: %s", dropZoneName); + getPlaneHosts(hosts, dropZone); + if (!hosts.ordinality()) + hosts.append("localhost"); + } + else + hosts.append(netaddr); - Owned onefile = createPhysicalFileStruct(); - - onefile->setName(fname.str()); - onefile->setIsDir(di->isDir()); - onefile->setFilesize(di->getFileSize()); - CDateTime modtime; - StringBuffer timestr; - di->getModifiedTime(modtime); - unsigned y,m,d,h,min,sec,nsec; - modtime.getDate(y,m,d,true); - 
modtime.getTime(h,min,sec,nsec,true); - timestr.appendf("%04d-%02d-%02d %02d:%02d:%02d", y,m,d,h,min,sec); - onefile->setModifiedtime(timestr.str()); - files.append(*onefile.getLink()); - } + IArrayOf& files = resp.getFiles(); + ForEachItemIn(i, hosts) + { + const char* host = hosts.item(i); + if (validateDropZonePath(nullptr, host, sPath)) + getDropZoneFiles(context, dropZoneName, host, sPath, fileNameMask, directoryOnly, files); } sPath.replace('\\', '/');//XSLT cannot handle backslashes resp.setPath(sPath); - resp.setFiles(files); resp.setNetaddr(netaddr); if (osStr && *osStr) { @@ -3049,22 +2921,15 @@ bool CFileSprayEx::checkDropZoneIPAndPath(double clientVersion, const char* drop return false; } -void CFileSprayEx::addDropZoneFile(IEspContext& context, IDirectoryIterator* di, const char* name, const char pathSep, const char* server, IArrayOf& files) +void CFileSprayEx::addDropZoneFile(IEspContext& context, IDirectoryIterator* di, const char* name, const char* path, const char* server, IArrayOf& files) { - Owned aFile = createPhysicalFileStruct(); + double version = context.getClientVersion(); - const char* pName = strrchr(name, pathSep); - if (!pName) - aFile->setName(name); - else - { - StringBuffer sPath; - sPath.append(pName - name, name); - aFile->setPath(sPath.str()); + Owned aFile = createPhysicalFileStruct(); - pName++; //skip the PathSepChar - aFile->setName(pName); - } + aFile->setName(name); + if (!isEmptyString(path)) + aFile->setPath(path); aFile->setIsDir(di->isDir()); CDateTime modtime; @@ -3076,34 +2941,50 @@ void CFileSprayEx::addDropZoneFile(IEspContext& context, IDirectoryIterator* di, timestr.appendf("%04d-%02d-%02d %02d:%02d:%02d", y,m,d,h,min,sec); aFile->setModifiedtime(timestr.str()); aFile->setFilesize(di->getFileSize()); - aFile->setServer(server); + if (version >= 1.23) + aFile->setServer(server); files.append(*aFile.getLink()); } -void CFileSprayEx::searchDropZoneFiles(IEspContext& context, const char* server, const char* dir, const 
char* nameFilter, IArrayOf& files, unsigned& filesFound) +bool CFileSprayEx::searchDropZoneFiles(IEspContext& context, const char* dropZoneName, const char* server, + const char* dir, const char* relDir, const char* nameFilter, IArrayOf& files, unsigned& filesFound) { + if (getDropZoneScopePermissions(context, dropZoneName, dir, server) < SecAccess_Read) + return false; + RemoteFilename rfn; SocketEndpoint ep(server); rfn.setPath(ep, dir); Owned f = createIFile(rfn); if(f->isDirectory()!=fileBool::foundYes) - throw MakeStringException(ECLWATCH_INVALID_DIRECTORY, "%s is not a directory.", dir); + throw makeStringExceptionV(ECLWATCH_INVALID_DIRECTORY, "%s is not a directory.", dir); - const char pathSep = getPathSepChar(dir); - Owned di = f->directoryFiles(nameFilter, true, true); + Owned di = f->directoryFiles(nullptr, false, true); ForEach(*di) { StringBuffer fname; di->getName(fname); - if (!fname.length()) + + if (di->isDir()) + { + StringBuffer fullPath(dir), relPath(relDir); + addPathSepChar(fullPath).append(fname); + if (!relPath.isEmpty()) + addPathSepChar(relPath); + relPath.append(fname); + if (!searchDropZoneFiles(context, dropZoneName, server, fullPath, relPath, nameFilter, files, filesFound)) + continue; + } + if (!isEmptyString(nameFilter) && !WildMatch(fname, nameFilter, false)) continue; + addDropZoneFile(context, di, fname.str(), relDir, server, files); + filesFound++; if (filesFound > dropZoneFileSearchMaxFiles) break; - - addDropZoneFile(context, di, fname.str(), pathSep, server, files); } + return true; } bool CFileSprayEx::onDropZoneFileSearch(IEspContext &context, IEspDropZoneFileSearchRequest &req, IEspDropZoneFileSearchResponse &resp) @@ -3137,7 +3018,7 @@ bool CFileSprayEx::onDropZoneFileSearch(IEspContext &context, IEspDropZoneFileSe double version = context.getClientVersion(); bool serverFound = false; unsigned filesFound = 0; - IArrayOf files; + IArrayOf &files = resp.getFiles(); bool isIPAddressReq = isIPAddress(dropZoneServerReq); 
IArrayOf allTpDropZones; CTpWrapper tpWrapper; @@ -3157,7 +3038,7 @@ bool CFileSprayEx::onDropZoneFileSearch(IEspContext &context, IEspDropZoneFileSe IConstTpMachine& tpMachine = tpMachines.item(ii); if (isEmptyString(dropZoneServerReq) || matchNetAddressRequest(dropZoneServerReq, isIPAddressReq, tpMachine)) { - searchDropZoneFiles(context, tpMachine.getNetaddress(), dropZone.getPath(), nameFilter, files, filesFound); + searchDropZoneFiles(context, dropZoneName, tpMachine.getNetaddress(), dropZone.getPath(), nullptr, nameFilter, files, filesFound); serverFound = true; } } @@ -3170,7 +3051,6 @@ bool CFileSprayEx::onDropZoneFileSearch(IEspContext &context, IEspDropZoneFileSe VStringBuffer msg("More than %u files are found. Only %u files are returned.", dropZoneFileSearchMaxFiles, dropZoneFileSearchMaxFiles); resp.setWarning(msg.str()); } - resp.setFiles(files); } catch(IException* e) { @@ -3271,62 +3151,33 @@ bool CFileSprayEx::onOpenSave(IEspContext &context, IEspOpenSaveRequest &req, IE return true; } -bool CFileSprayEx::getDropZoneFiles(IEspContext &context, const char* dropZone, const char* netaddr, const char* path, - IEspDropZoneFilesRequest &req, IEspDropZoneFilesResponse &resp) +void CFileSprayEx::getDropZoneFiles(IEspContext &context, const char *dropZoneName, const char *host, const char *path, const char *fileNameMask, bool directoryOnly, IArrayOf &files) { - if (!checkDropZoneIPAndPath(context.getClientVersion(), dropZone, netaddr, path)) - throw MakeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "Dropzone is not found in the environment settings."); - - bool directoryOnly = req.getDirectoryOnly(); - + SocketEndpoint ep(host); RemoteFilename rfn; - SocketEndpoint ep; -#ifdef MACHINE_IP - ep.set(MACHINE_IP); -#else - ep.set(netaddr); - if (ep.isNull()) - throw MakeStringException(ECLWATCH_INVALID_INPUT, "CFileSprayEx::getDropZoneFiles: cannot resolve network IP from %s.", netaddr); -#endif - rfn.setPath(ep, path); Owned f = createIFile(rfn); - 
if(f->isDirectory()!=fileBool::foundYes) - throw MakeStringException(ECLWATCH_INVALID_DIRECTORY, "%s is not a directory.", path); + if (f->isDirectory()!=fileBool::foundYes) + throw makeStringExceptionV(ECLWATCH_INVALID_DIRECTORY, "%s is not a directory.", path); - IArrayOf files; - Owned di = f->directoryFiles(NULL, false, true); - if(di.get() != NULL) + Owned di = f->directoryFiles(nullptr, false, true); + ForEach(*di) { - ForEach(*di) - { - StringBuffer fname; - di->getName(fname); + StringBuffer fileName; + di->getName(fileName); - if (fname.length() == 0 || (directoryOnly && !di->isDir())) - continue; - - Owned onefile = createPhysicalFileStruct(); + if ((directoryOnly && !di->isDir()) || (!di->isDir() && !isEmptyString(fileNameMask) && !WildMatch(fileName.str(), fileNameMask, true))) + continue; - onefile->setName(fname.str()); - onefile->setIsDir(di->isDir()); - onefile->setFilesize(di->getFileSize()); - CDateTime modtime; - StringBuffer timestr; - di->getModifiedTime(modtime); - unsigned y,m,d,h,min,sec,nsec; - modtime.getDate(y,m,d,true); - modtime.getTime(h,min,sec,nsec,true); - timestr.appendf("%04d-%02d-%02d %02d:%02d:%02d", y,m,d,h,min,sec); - onefile->setModifiedtime(timestr.str()); - onefile->setServer(netaddr); - files.append(*onefile.getLink()); + if (di->isDir()) + { + VStringBuffer fullPath("%s%s", path, fileName.str()); + if (getDropZoneScopePermissions(context, dropZoneName, fullPath, nullptr) < SecAccess_Read) + continue; } - } - - resp.setFiles(files); - return true; + addDropZoneFile(context, di, fileName, path, host, files); + } } void CFileSprayEx::getServersInDropZone(const char *dropZoneName, IArrayOf &dropZoneList, bool isECLWatchVisibleOnly, StringArray &serverList) @@ -3426,8 +3277,19 @@ bool CFileSprayEx::onDropZoneFiles(IEspContext &context, IEspDropZoneFilesReques } addPathSepChar(directoryStr); + if (isEmptyString(dzName)) + dzName = findDropZonePlaneName(directoryStr, netAddress); + if (getDropZoneScopePermissions(context, 
dzName, directoryStr, nullptr) < SecAccess_Read) + return false; + + bool directoryOnly = req.getDirectoryOnly(); + IArrayOf &files = resp.getFiles(); if (!isEmptyString(netAddress)) - getDropZoneFiles(context, dzName, netAddress, directoryStr, req, resp); + { + if (!checkDropZoneIPAndPath(context.getClientVersion(), dzName, netAddress, directoryStr)) + throw makeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "Dropzone is not found in the environment settings."); + getDropZoneFiles(context, dzName, netAddress, directoryStr, nullptr, directoryOnly, files); + } else { //Find out all DropZone servers inside the DropZone. @@ -3437,7 +3299,11 @@ bool CFileSprayEx::onDropZoneFiles(IEspContext &context, IEspDropZoneFilesReques return true; ForEachItemIn(itr, servers) - getDropZoneFiles(context, dzName, servers.item(itr), directoryStr, req, resp); + { + const char* host = servers.item(itr); + if (checkDropZoneIPAndPath(context.getClientVersion(), dzName, host, directoryStr)) + getDropZoneFiles(context, dzName, host, directoryStr, nullptr, directoryOnly, files); + } } resp.setDropZoneName(dzName); @@ -3481,6 +3347,8 @@ bool CFileSprayEx::onDeleteDropZoneFiles(IEspContext &context, IEspDeleteDropZon if (!checkDropZoneIPAndPath(version, dzName, netAddress, path.str())) throw MakeStringException(ECLWATCH_DROP_ZONE_NOT_FOUND, "Dropzone is not found in the environment settings."); + checkDropZoneFileScopeAccess(context, dzName, netAddress, path, files, SecAccess_Full); + RemoteFilename rfn; SocketEndpoint ep(netAddress); if (ep.isNull()) @@ -3544,6 +3412,76 @@ bool CFileSprayEx::onDeleteDropZoneFiles(IEspContext &context, IEspDeleteDropZon return true; } +void CFileSprayEx::checkDropZoneFileScopeAccess(IEspContext &context, const char *dropZoneName, const char *netAddress, + const char *dropZonePath, const StringArray &dropZoneFiles, SecAccessFlags accessReq) +{ + const char *accessReqName = getSecAccessFlagName(accessReq); + if (isEmptyString(dropZoneName)) + dropZoneName = 
findDropZonePlaneName(dropZonePath, netAddress); + SecAccessFlags permission = getDropZoneScopePermissions(context, dropZoneName, dropZonePath, nullptr); + if (permission < accessReq) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "Access DropZone Scope %s %s not allowed for user %s (permission:%s). %s Permission Required.", + dropZoneName, dropZonePath, accessReqName, context.queryUserId(), getSecAccessFlagName(permission)); + + RemoteFilename rfn; + SocketEndpoint ep(netAddress); + rfn.setIp(ep); + + StringBuffer errorMessage; + MapStringTo uniquePath; + const char pathSep = getPathSepChar(dropZonePath); + ForEachItemIn(i, dropZoneFiles) + { + const char *fileNameWithPath = dropZoneFiles.item(i); + if (isEmptyString(fileNameWithPath)) + continue; + + StringBuffer fileToDelete(dropZonePath); + addPathSepChar(fileToDelete).append(fileNameWithPath); + + StringBuffer pathToCheck; + rfn.setRemotePath(fileToDelete.str()); + Owned rFile = createIFile(rfn); + if (rFile->isDirectory() == fileBool::foundYes) + pathToCheck.append(fileNameWithPath); + else + { + splitDirTail(fileNameWithPath, pathToCheck); + if (pathToCheck.isEmpty()) + continue; + } + + //a subfolder or a file under a subfolder. Check whether accessing the subfolder is allowed. + bool *found = uniquePath.getValue(pathToCheck.str()); + if (found) + { + if (!*found) //found a path denied + errorMessage.append("; ").append(fileNameWithPath); + continue; + } + + StringBuffer fullPath(dropZonePath); + addPathSepChar(fullPath).append(pathToCheck); + SecAccessFlags permission = getDropZoneScopePermissions(context, dropZoneName, fullPath, nullptr); + if (permission < accessReq) + { + uniquePath.setValue(pathToCheck.str(), false); //add a path denied + if (errorMessage.isEmpty()) + errorMessage.setf("User %s (permission:%s): failed to access the DropZone Scopes for the following file(s). %s Permission Required. 
%s", + context.queryUserId(), getSecAccessFlagName(permission), accessReqName, fileNameWithPath); + else + errorMessage.append("; ").append(fileNameWithPath); + } + else + { + uniquePath.setValue(pathToCheck.str(), true); //add a path allowed + } + } + + if (!errorMessage.isEmpty()) + throw makeStringException(ECLWATCH_INVALID_INPUT, errorMessage.str()); +} + void CFileSprayEx::appendGroupNode(IArrayOf& groupNodes, const char* nodeName, const char* clusterType, bool replicateOutputs) { diff --git a/esp/services/ws_fs/ws_fsService.hpp b/esp/services/ws_fs/ws_fsService.hpp index 91ff5af82e6..7fdd01e0f83 100644 --- a/esp/services/ws_fs/ws_fsService.hpp +++ b/esp/services/ws_fs/ws_fsService.hpp @@ -143,19 +143,20 @@ class CFileSprayEx : public CFileSpray void getInfoFromSasha(IEspContext &context, const char *sashaServer, const char* wuid, IEspDFUWorkunit *info); bool getArchivedWUInfo(IEspContext &context, IEspGetDFUWorkunit &req, IEspGetDFUWorkunitResponse &resp); bool GetArchivedDFUWorkunits(IEspContext &context, IEspGetDFUWorkunits &req, IEspGetDFUWorkunitsResponse &resp); - bool getDropZoneFiles(IEspContext &context, const char* dropZone, const char* netaddr, const char* path, IEspDropZoneFilesRequest &req, IEspDropZoneFilesResponse &resp); + void getDropZoneFiles(IEspContext &context, const char *dropZoneName, const char *host, const char *path, const char *fileNameMask, bool directoryOnly, IArrayOf &files); bool ParseLogicalPath(const char * pLogicalPath, StringBuffer &title); bool ParseLogicalPath(const char * pLogicalPath, const char *group, const char* cluster, StringBuffer &folder, StringBuffer &title, StringBuffer &defaultFolder, StringBuffer &defaultReplicateFolder); StringBuffer& getAcceptLanguage(IEspContext& context, StringBuffer& acceptLanguage); void appendGroupNode(IArrayOf& groupNodes, const char* nodeName, const char* clusterType, bool replicateOutputs); bool getOneDFUWorkunit(IEspContext& context, const char* wuid, IEspGetDFUWorkunitsResponse& 
resp); - void getDropZoneInfoByIP(double clientVersion, const char* destIP, const char* destFile, StringBuffer& path, StringBuffer& mask); void getDropZoneInfoByDestPlane(double clientVersion, const char* destGroup, const char* destFileIn, StringBuffer& destFileOut, StringBuffer& umask, StringBuffer & hostip); bool checkDropZoneIPAndPath(double clientVersion, const char* dropZone, const char* netAddr, const char* path); - void addDropZoneFile(IEspContext& context, IDirectoryIterator* di, const char* name, const char pathSep, const char* server, IArrayOf&files); - void searchDropZoneFiles(IEspContext& context, const char* server, const char* dir, const char* nameFilter, IArrayOf& files, unsigned& filesFound); + void addDropZoneFile(IEspContext& context, IDirectoryIterator* di, const char* name, const char* path, const char* server, IArrayOf&files); + bool searchDropZoneFiles(IEspContext& context, const char* dropZone, const char* server, const char* dir, const char* relDir, const char* nameFilter, IArrayOf& files, unsigned& filesFound); void setDFUServerQueueReq(const char* dfuServerQueue, IDFUWorkUnit* wu); void setUserAuth(IEspContext &context, IDFUWorkUnit* wu); + void checkDropZoneFileScopeAccess(IEspContext &context, const char *dropZoneName, const char *netAddress, + const char *dropZonePath, const StringArray &dropZoneFiles, SecAccessFlags accessReq); }; #endif //_ESPWIZ_FileSpray_HPP__ diff --git a/esp/smc/SMCLib/TpCommon.cpp b/esp/smc/SMCLib/TpCommon.cpp index 08ac870cc49..f379227d272 100644 --- a/esp/smc/SMCLib/TpCommon.cpp +++ b/esp/smc/SMCLib/TpCommon.cpp @@ -156,3 +156,22 @@ extern TPWRAPPER_API bool validateDropZonePath(const char* dropZoneName, const c return false; } +extern TPWRAPPER_API const char* findDropZonePlaneName(const char* dropZonePath, const char* dropZoneHost) +{ + Owned plane = findDropZonePlane(dropZonePath, dropZoneHost, true); + if (!plane) + throw makeStringExceptionV(ECLWATCH_INVALID_INPUT, "findDropZonePlaneName(): DropZone not 
found for Host %s and Path %s.", dropZoneHost, dropZonePath); + return plane->queryProp("@name"); +} + +extern TPWRAPPER_API SecAccessFlags getDropZoneScopePermissions(IEspContext& context, const char* dropZoneName, const char* dropZonePath, const char* dropZoneHost) +{ + if (isEmptyString(dropZonePath)) + throw makeStringException(ECLWATCH_INVALID_CLUSTER_NAME, "getDropZoneScopePermissions(): DropZone path must be specified."); + if (isEmptyString(dropZoneName)) + dropZoneName = findDropZonePlaneName(dropZonePath, dropZoneHost); + + Owned userDesc = createUserDescriptor(); + userDesc->set(context.queryUserId(), context.queryPassword(), context.querySignature()); + return queryDistributedFileDirectory().getDropZoneScopePermissions(dropZoneName, dropZonePath, userDesc); +} diff --git a/esp/smc/SMCLib/TpWrapper.hpp b/esp/smc/SMCLib/TpWrapper.hpp index 29b6f28eebe..116a282b1e8 100644 --- a/esp/smc/SMCLib/TpWrapper.hpp +++ b/esp/smc/SMCLib/TpWrapper.hpp @@ -229,6 +229,8 @@ extern TPWRAPPER_API bool validateDataPlaneName(const char *remoteDali, const ch extern TPWRAPPER_API bool matchNetAddressRequest(const char* netAddressReg, bool ipReq, IConstTpMachine& tpMachine); extern TPWRAPPER_API bool validateDropZonePath(const char* dropZoneName, const char* netAddr, const char* pathToCheck); +extern TPWRAPPER_API const char* findDropZonePlaneName(const char* dropZonePath, const char* dropZoneHost); +extern TPWRAPPER_API SecAccessFlags getDropZoneScopePermissions(IEspContext& context, const char * dropZoneName, const char * dropZonePath, const char * dropZoneHost); #endif //_ESPWIZ_TpWrapper_HPP__ From f68ff67556a9574d0ce363841a0cb5659341a1d7 Mon Sep 17 00:00:00 2001 From: Kunal Aswani Date: Fri, 20 Jan 2023 15:27:50 -0500 Subject: [PATCH 09/21] HPCC-28701 ES Translations for 8.12.X Translations added for Spanish (ES). 
Signed-off-by: Kunal Aswani --- esp/src/src/nls/es/hpcc.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/esp/src/src/nls/es/hpcc.ts b/esp/src/src/nls/es/hpcc.ts index 714d8c9b260..a541d9e0500 100644 --- a/esp/src/src/nls/es/hpcc.ts +++ b/esp/src/src/nls/es/hpcc.ts @@ -2,6 +2,7 @@ export = { Abort: "Aborte", AbortedBy: "Abortado por", AbortedTime: "Abortado a las", + AbortSelectedWorkunits: "¿Cancelar unidad(es) de trabajo seleccionada(s)? Su ID de inicio de sesión se registrará para esta acción dentro de la(s) WU(s).", About: "Acerca", AboutGraphControl: "Acerca del controlador gráfico", AboutHPCCSystems: "Acerca de HPCC Systems®", @@ -111,6 +112,7 @@ export = { Columns: "Columnas", Command: "Comando", Comment: "Comentario", + CompileCost: "Costo de compilación", Compiled: "Compilado", Compiling: "Compilando", Completed: "Completado", @@ -233,6 +235,7 @@ export = { Downloads: "Descargas", DownloadSelectionAsCSV: "Descargar la selección como CSV", DownloadToCSV: "Bajar en formato CSV", + DownloadToCSVNonFlatWarning: "Tenga en cuenta: la descarga de archivos que contienen conjuntos de datos anidados como datos separados por comas puede no tener el formato esperado", DropZone: "Zona de carga", DueToInctivity: "Se desconectará de todas las sesiones de ECL Watch en 3 minutos debido a inactividad.", Duration: "Duración", @@ -640,6 +643,7 @@ export = { PleaseSelectAUserToAdd: "Por favor escoja el usario para agregar", Plugins: "Complementos", Pods: "Pods", + PodsAccessError: "No se puede recuperar la lista de pods", Port: "Puerto", Prefix: "Prefijo", PrefixPlaceholder: "filename{:length}, filesize{:[B|L][1-8]}", @@ -875,6 +879,7 @@ export = { ThorProcess: "Proceso de Thor", ThreadID: "Identificación de subproceso", Time: "Tiempo", + Timeline: "Cronología", TimeMaxTotalExecuteMinutes: "Maximo tiempo total de ejecucion en minutos", TimeMeanTotalExecuteMinutes: "Total tiempo total de ejecucion en minutos", TimeMinTotalExecuteMinutes: "Minomo tiempo total de 
ejecucion en minutos", From 94362e36f6ff8d22b2c3a3aaa9f2976c2f7cd405 Mon Sep 17 00:00:00 2001 From: Kunal Aswani Date: Fri, 20 Jan 2023 15:52:21 -0500 Subject: [PATCH 10/21] HPCC-28704 ZH Translations for 8.12.X Translations added for Chinese (ZH). Signed-off-by: Kunal Aswani --- esp/src/src/nls/zh/hpcc.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/esp/src/src/nls/zh/hpcc.ts b/esp/src/src/nls/zh/hpcc.ts index 38104b9d3be..52003b9d493 100644 --- a/esp/src/src/nls/zh/hpcc.ts +++ b/esp/src/src/nls/zh/hpcc.ts @@ -2,6 +2,7 @@ Abort: "终止", AbortedBy: "终止者", AbortedTime: "终止时间", + AbortSelectedWorkunits: "中止所选择的工作单元?你的登录代号和操作会记录在工作单元里。", About: "本系统简介", AboutGraphControl: "图形控制器简介", AboutHPCCSystems: "HPCC Systems®简介", @@ -111,6 +112,7 @@ Columns: "列", Command: "指令", Comment: "注释", + CompileCost: "编译费用", Compiled: "已编译", Compiling: "编译", Completed: "完成", @@ -233,6 +235,7 @@ Downloads: "下载", DownloadSelectionAsCSV: "下载CSV格式", DownloadToCSV: "下载成CSV", + DownloadToCSVNonFlatWarning: "注意:把含有嵌套格式的数据文件下载为逗号分隔文件可能产生意想不到的格式。", DropZone: "文件停放区", DueToInctivity: "如果三分钟之内没有活动,您将退出所有ECL Watch进程。", Duration: "时间段", @@ -640,6 +643,7 @@ PleaseSelectAUserToAdd: "请选择要添加的用户", Plugins: "插件", Pods: "Pods", + PodsAccessError: "无法获取运行Pod表", Port: "端口", Prefix: "前缀", PrefixPlaceholder: "文件名{:长度}, 文件大小{:[B|L][1-8]}", @@ -874,6 +878,7 @@ ThorProcess: "Thor 进程", ThreadID: "线程编号", Time: "时间", + Timeline: "时间点", TimeMaxTotalExecuteMinutes: "总运行时间最大值(分钟)", TimeMeanTotalExecuteMinutes: "总运行时间均值(分钟)", TimeMinTotalExecuteMinutes: "总运行时间最小值(分钟)", From fc73543631b5f84631c7b478bc193af546c2ff1f Mon Sep 17 00:00:00 2001 From: Mark Kelly Date: Fri, 20 Jan 2023 16:28:02 -0500 Subject: [PATCH 11/21] HPCC-28820 Fix typo in argument Signed-off-by: Mark Kelly --- tools/roxie/extract-roxie-timings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/roxie/extract-roxie-timings.py b/tools/roxie/extract-roxie-timings.py index d1c2b314e5b..552fd528041 100755 --- 
a/tools/roxie/extract-roxie-timings.py +++ b/tools/roxie/extract-roxie-timings.py @@ -88,7 +88,7 @@ def printRow(curRow): args = parser.parse_args() combineServices = args.all suppressDetails = args.summaryonly - reportSummary = not args.nosummary or args.summayonly + reportSummary = not args.nosummary or args.summaryonly csv.field_size_limit(0x100000) with open(args.filename, encoding='latin1') as csv_file: From 25de93c847132f6c43c69160a5c55f92d1d5a0d3 Mon Sep 17 00:00:00 2001 From: Anthony Fishbeck Date: Fri, 13 Jan 2023 14:11:12 -0500 Subject: [PATCH 12/21] HPCC-28816 httplib TLS improvements Signed-off-by: Anthony Fishbeck --- helm/hpcc/templates/_helpers.tpl | 3 +++ helm/hpcc/values.schema.json | 4 ++++ system/httplib/httplib.h | 26 +++++++++++++++++++++++--- system/jlib/jsecrets.cpp | 31 +++++++++++++++++++++++++++---- 4 files changed, 57 insertions(+), 7 deletions(-) diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 13f1256f605..70c4217c18c 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -575,6 +575,9 @@ vaults: kind: {{ $vault.kind }} {{- if $vault.namespace }} namespace: {{ $vault.namespace }} + {{- end }} + {{- if (hasKey $vault "verify_server") }} + verify_server: {{ $vault.verify_server }} {{- end }} url: {{ $vault.url }} {{- if index $vault "client-secret" }} diff --git a/helm/hpcc/values.schema.json b/helm/hpcc/values.schema.json index 5254c87b965..b575d1761e0 100644 --- a/helm/hpcc/values.schema.json +++ b/helm/hpcc/values.schema.json @@ -753,6 +753,10 @@ "namespace": { "description": "the namespace to use when authenticating with, and accessing the vault", "type": "string" + }, + "verify_server": { + "description": "optional relax server verification for trouble shooting", + "type": "boolean" } }, "required": [ "name", "url" ], diff --git a/system/httplib/httplib.h b/system/httplib/httplib.h index d9a793f569f..aa3f511c853 100644 --- a/system/httplib/httplib.h +++ 
b/system/httplib/httplib.h @@ -236,6 +236,9 @@ inline const unsigned char *ASN1_STRING_get0_data(const ASN1_STRING *asn1) { #include #endif +#include "platform.h" +#include "jlog.hpp" + /* * Declaration */ @@ -4634,7 +4637,11 @@ inline bool ClientImpl::send(const Request &req, Response &res) { } if (!is_alive) { - if (!create_and_connect_socket(socket_)) { return false; } + if (!create_and_connect_socket(socket_)) + { + OERRLOG("HTTPLIB Error create_and_connect_socket failed"); + return false; + } #ifdef CPPHTTPLIB_OPENSSL_SUPPORT // TODO: refactoring @@ -4647,7 +4654,11 @@ inline bool ClientImpl::send(const Request &req, Response &res) { } } - if (!scli.initialize_ssl(socket_)) { return false; } + if (!scli.initialize_ssl(socket_)) + { + OERRLOG("HTTPLIB Error initialize_ssl failed"); + return false; + } } #endif } @@ -4662,7 +4673,11 @@ inline bool ClientImpl::send(const Request &req, Response &res) { if (close_connection || !ret) { stop_core(); } if (!ret) { - if (error_ == Error::Success) { error_ = Error::Unknown; } + if (error_ == Error::Success) + { + OERRLOG("HTTPLIB process_socket unknown error"); + error_ = Error::Unknown; + } } return ret; @@ -5872,6 +5887,7 @@ inline bool SSLClient::initialize_ssl(Socket &socket) { [&](SSL *ssl) { if (server_certificate_verification_) { if (!load_certs()) { + OERRLOG("HTTPLIB Error loading ssl certs"); error_ = Error::SSLLoadingCerts; return false; } @@ -5879,6 +5895,7 @@ inline bool SSLClient::initialize_ssl(Socket &socket) { } if (SSL_connect(ssl) != 1) { + OERRLOG("HTTPLIB Error connecting ssl"); error_ = Error::SSLConnection; return false; } @@ -5887,6 +5904,7 @@ inline bool SSLClient::initialize_ssl(Socket &socket) { verify_result_ = SSL_get_verify_result(ssl); if (verify_result_ != X509_V_OK) { + OERRLOG("HTTPLIB Error verifying server certificate SSL_get_verify_result %ld", verify_result_); error_ = Error::SSLServerVerification; return false; } @@ -5894,11 +5912,13 @@ inline bool SSLClient::initialize_ssl(Socket 
&socket) { auto server_cert = SSL_get_peer_certificate(ssl); if (server_cert == nullptr) { + OERRLOG("HTTPLIB Error getting server certificate SSL_get_peer_certificate"); error_ = Error::SSLServerVerification; return false; } if (!verify_host(server_cert)) { + OERRLOG("HTTPLIB Error self verifying server certificate verify_host"); X509_free(server_cert); error_ = Error::SSLServerVerification; return false; diff --git a/system/jlib/jsecrets.cpp b/system/jlib/jsecrets.cpp index f3c5a749d4d..be68f3dd983 100644 --- a/system/jlib/jsecrets.cpp +++ b/system/jlib/jsecrets.cpp @@ -248,6 +248,7 @@ class CVault StringBuffer clientToken; time_t clientTokenExpiration = 0; bool clientTokenRenewable = false; + bool verify_server = true; public: CVault(IPropertyTree *vault) @@ -255,14 +256,24 @@ class CVault cache.setown(createPTree()); StringBuffer url; replaceEnvVariables(url, vault->queryProp("@url"), false); + PROGLOG("vault url %s", url.str()); if (url.length()) splitUrlSchemeHostPort(url.str(), username, password, schemeHostPort, path); + + if (username.length() || password.length()) + WARNLOG("vault: unexpected use of basic auth in url, user=%s", username.str()); + name.set(vault->queryProp("@name")); kind = getSecretType(vault->queryProp("@kind")); vaultNamespace.set(vault->queryProp("@namespace")); if (vaultNamespace.length()) + { addPathSepChar(vaultNamespace, '/'); + PROGLOG("vault: namespace %s", vaultNamespace.str()); + } + verify_server = vault->getPropBool("@verify_server", true); + PROGLOG("Vault: httplib verify_server=%s", boolToStr(verify_server)); //set up vault client auth [appRole, clientToken (aka "token from the sky"), or kubernetes auth] appRoleId.set(vault->queryProp("@appRoleId")); @@ -328,7 +339,9 @@ class CVault void processClientTokenResponse(httplib::Result &res) { if (!res) - vaultAuthError("missing login response"); + vaultAuthErrorV("missing login response, error %d", res.error()); + if (res.error()!=0) + OERRLOG("JSECRETS login calling HTTPLIB 
POST returned error %d", res.error()); if (res->status != 200) vaultAuthErrorV("[%d](%d) - response: %s", res->status, res.error(), res->body.c_str()); const char *json = res->body.c_str(); @@ -355,6 +368,7 @@ class CVault { if (clientTokenExpiration==0) return false; + double remaining = difftime(clientTokenExpiration, time(nullptr)); if (remaining <= 0) { @@ -383,6 +397,8 @@ class CVault std::string json; json.append("{\"jwt\": \"").append(login_token.str()).append("\", \"role\": \"").append(k8sAuthRole.str()).append("\"}"); httplib::Client cli(schemeHostPort.str()); + cli.enable_server_certificate_verification(verify_server); + if (username.length() && password.length()) cli.set_basic_auth(username, password); httplib::Headers headers; @@ -410,12 +426,16 @@ class CVault std::string json; json.append("{\"role_id\": \"").append(appRoleId).append("\", \"secret_id\": \"").append(appRoleSecretId).append("\"}"); + httplib::Client cli(schemeHostPort.str()); + cli.enable_server_certificate_verification(verify_server); + if (username.length() && password.length()) cli.set_basic_auth(username, password); httplib::Headers headers; if (vaultNamespace.length()) headers.emplace("X-Vault-Namespace", vaultNamespace.str()); + httplib::Result res = cli.Post("/v1/auth/approle/login", headers, json, "application/json"); processClientTokenResponse(res); } @@ -472,6 +492,8 @@ class CVault checkAuthentication(permissionDenied); httplib::Client cli(schemeHostPort.str()); + cli.enable_server_certificate_verification(verify_server); + if (username.length() && password.length()) cli.set_basic_auth(username.str(), password.str()); @@ -482,6 +504,7 @@ class CVault headers.emplace("X-Vault-Namespace", vaultNamespace.str()); httplib::Result res = cli.Get(location, headers); + if (res) { if (res->status == 200) @@ -496,15 +519,15 @@ class CVault //try again forcing relogin, but only once. 
Just in case the token was invalidated but hasn't passed expiration time (for example max usage count exceeded). if (permissionDenied==false) return requestSecretAtLocation(rkind, content, location, secret, version, true); - OERRLOG("Vault %s permission denied accessing secret (check namespace=%s?) %s.%s [%d](%d) - response: %s", name.str(), vaultNamespace.str(), secret, version ? version : "", res->status, res.error(), res->body.c_str()); + OERRLOG("Vault %s permission denied accessing secret (check namespace=%s?) %s.%s location %s [%d](%d) - response: %s", name.str(), vaultNamespace.str(), secret, version ? version : "", location ? location : "null", res->status, res.error(), res->body.c_str()); } else { - OERRLOG("Vault %s error accessing secret %s.%s [%d](%d) - response: %s", name.str(), secret, version ? version : "", res->status, res.error(), res->body.c_str()); + OERRLOG("Vault %s error accessing secret %s.%s location %s [%d](%d) - response: %s", name.str(), secret, version ? version : "", location ? location : "null", res->status, res.error(), res->body.c_str()); } } else - OERRLOG("Error: Vault %s http error (%d) accessing secret %s.%s", name.str(), res.error(), secret, version ? version : ""); + OERRLOG("Error: Vault %s http error (%d) accessing secret %s.%s location %s", name.str(), res.error(), secret, version ? version : "", location ? 
location : "null"); return false; } bool requestSecret(CVaultKind &rkind, StringBuffer &content, const char *secret, const char *version) From b550142fde7b2d6d5c9602454f225b912732386e Mon Sep 17 00:00:00 2001 From: Richard Chapman Date: Thu, 19 Jan 2023 14:11:43 +0000 Subject: [PATCH 13/21] HPCC-28746 Improve reporting of Maximum Packet Length Exceeded errors Signed-off-by: Richard Chapman --- roxie/ccd/ccdqueue.cpp | 35 +++++++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/roxie/ccd/ccdqueue.cpp b/roxie/ccd/ccdqueue.cpp index 2256f178095..40776b1e800 100644 --- a/roxie/ccd/ccdqueue.cpp +++ b/roxie/ccd/ccdqueue.cpp @@ -1967,6 +1967,31 @@ class RoxieReceiverBase : implements IRoxieOutputQueueManager, public CInterface #pragma warning ( disable: 4355 ) #endif +static void throwPacketTooLarge(IRoxieQueryPacket *x, unsigned maxPacketSize) +{ + StringBuffer t; + unsigned traceLength = x->getTraceLength(); + if (traceLength) + { + const byte *traceInfo = x->queryTraceInfo(); + unsigned char loggingFlags = *traceInfo; + if (loggingFlags & LOGGING_FLAGSPRESENT) // should always be true.... 
but this flag is handy to avoid flags byte ever being NULL + { + traceInfo++; + traceLength--; + if (loggingFlags & LOGGING_TRACELEVELSET) + { + traceInfo++; + traceLength--; + } + t.append(traceLength, (const char *) traceInfo); + } + } + throw MakeStringException(ROXIE_PACKET_ERROR, "Maximum packet length %d exceeded sending packet %s (context length %u, continuation length %u, smart step length %u, trace length %u, total length %u", + maxPacketSize, t.str(), + x->getContextLength(), x->getContinuationLength(), x->getSmartStepInfoLength(), x->getTraceLength(), x->queryHeader().packetlength); +} + class RoxieThrottledPacketSender : public Thread { TokenBucket &bucket; @@ -2075,10 +2100,7 @@ class RoxieThrottledPacketSender : public Thread break; } if (length > maxPacketSize) - { - StringBuffer s; - throw MakeStringException(ROXIE_PACKET_ERROR, "Maximum packet length %d exceeded sending packet %s", maxPacketSize, header.toString(s).str()); - } + throwPacketTooLarge(x, maxPacketSize); enqueue(x); } @@ -2413,10 +2435,7 @@ class RoxieSocketQueueManager : public RoxieReceiverBase break; } if (length > maxPacketSize) - { - StringBuffer s; - throw MakeStringException(ROXIE_PACKET_ERROR, "Maximum packet length %d exceeded sending packet %s", maxPacketSize, header.toString(s).str()); - } + throwPacketTooLarge(x, maxPacketSize); Owned serialized = x->serialize(); if (!channelWrite(serialized->queryHeader(), true)) logctx.CTXLOG("Roxie packet write wrote too little"); From bc010102962ded54df4838e28271e38aa512b903 Mon Sep 17 00:00:00 2001 From: Anthony Fishbeck Date: Mon, 23 Jan 2023 03:32:41 -0500 Subject: [PATCH 14/21] HPCC-28826 cert-manager: add support for secretTemplate Add helm support for cert-manager certificate secretTemplate. Kubernetes add-ons for doing things like synchronizing secrets/certificates across aks clusters or namespaces require annotations be added to the kubernetes secret resources to control the new behavior. 
This can be done through cert-manager using a feature known as the secretTemplate. Signed-off-by: Anthony Fishbeck --- helm/hpcc/templates/_helpers.tpl | 12 +++++++++++- helm/hpcc/templates/esp.yaml | 2 +- helm/hpcc/values.schema.json | 18 ++++++++++++++++++ helm/hpcc/values.yaml | 10 ++++++---- 4 files changed, 36 insertions(+), 6 deletions(-) diff --git a/helm/hpcc/templates/_helpers.tpl b/helm/hpcc/templates/_helpers.tpl index 5de1f167d5a..c236b791ac9 100644 --- a/helm/hpcc/templates/_helpers.tpl +++ b/helm/hpcc/templates/_helpers.tpl @@ -1645,11 +1645,16 @@ remote client certificates. Adding the following to ESP (Roxie support to be added later) remoteClients: - name: myRemoteClient + organization: myorg #optional + secretTemplate: #optional add annotations to generated secret for tools like kubed config-syncer + annotations: + kubed.appscode.com/sync: "hpcc=testns" #sync certificate to matching namespaces + Will generate certificates that can be deployed to the remote client. Will cause ESP to require client certificates when a socket connects. Will create a TLS based access control list which ESP will check to make sure a connections client certificate is enabled. -Pass in root, client (name), organization (optional), instance (myeclwatch), component (eclwatch), visibility +Pass in root, client (name), organization (optional), instance (myeclwatch), component (eclwatch), visibility, secretTemplate (optional) */}} {{- define "hpcc.addClientCertificate" }} {{- if (.root.Values.certificates | default dict).enabled -}} @@ -1670,6 +1675,7 @@ Pass in root, client (name), organization (optional), instance (myeclwatch), com {{- $component := .component -}} {{- $client := .client -}} {{- $organization := .organization -}} + {{- $secretTemplate := .secretTemplate -}} {{- if not $externalCert -}} {{- $_ := fail (printf "Remote certificate defined for non external facing service %s - %s." 
$component $instance) -}} {{- end }} @@ -1682,6 +1688,10 @@ metadata: spec: # Secret names are always required. secretName: client-{{ $issuerKeyName }}-{{ $component }}-{{ $instance }}-{{ $client }}-tls + {{- if $secretTemplate }} + secretTemplate: +{{ toYaml $secretTemplate | indent 4 }} + {{- end }} duration: 2160h # 90d renewBefore: 360h # 15d subject: diff --git a/helm/hpcc/templates/esp.yaml b/helm/hpcc/templates/esp.yaml index 4724b5a4bf1..6f2872e306e 100644 --- a/helm/hpcc/templates/esp.yaml +++ b/helm/hpcc/templates/esp.yaml @@ -204,7 +204,7 @@ kind: ConfigMap {{- $instance := .name -}} {{- $visibility := .service.visibility -}} {{- range $remoteClient := .remoteClients }} - {{ include "hpcc.addClientCertificate" (dict "root" $ "client" $remoteClient.name "organization" $remoteClient.organization "instance" $instance "component" $application "visibility" $visibility) }} + {{ include "hpcc.addClientCertificate" (dict "root" $ "client" $remoteClient.name "organization" $remoteClient.organization "instance" $instance "component" $application "visibility" $visibility "secretTemplate" $remoteClient.secretTemplate) }} {{- end }} {{- end }} {{- end }} diff --git a/helm/hpcc/values.schema.json b/helm/hpcc/values.schema.json index bce97c293aa..2fdd4d266bf 100644 --- a/helm/hpcc/values.schema.json +++ b/helm/hpcc/values.schema.json @@ -2832,6 +2832,24 @@ "name": { "type": "string", "description": "Remote client name" + }, + "organization": { + "type": "string", + "description": "Remote client organization" + }, + "secretTemplate": { + "type": "object", + "description": "cert-manager secretTemplate for this remoteClient secret", + "properties": { + "annotations": { + "type": "object", + "additionalProperties": { "type": "string" } + }, + "labels": { + "type": "object", + "additionalProperties": { "type": "string" } + } + } } } } diff --git a/helm/hpcc/values.yaml b/helm/hpcc/values.yaml index 2fd2a0d6604..63322bd6255 100644 --- a/helm/hpcc/values.yaml +++ 
b/helm/hpcc/values.yaml @@ -566,8 +566,12 @@ esp: # Add remote clients to generated client certificates and make the ESP require that one of the generated certificates is provided by a client in order to connect # When setting up remote clients make sure that certificates.issuers.remote.enabled is set to true. # remoteClients: -# - name: myclient -# organization: mycompany +# - name: petfoodApplicationProd +# organization: petfoodDept +# secretTemplate: +# annotations: +# kubed.appscode.com/sync: "hpccenv=petfoodAppProd" # use kubed config-syncer to replicate certificate to namespace with matching annotation (also supports syncing with separate aks clusters) + service: ## port can be used to change the local port used by the pod. If omitted, the default port (8880) is used port: 8888 @@ -651,8 +655,6 @@ esp: application: sql2ecl auth: none replicas: 1 -# remoteClients: -# - name: sqlclient111 service: visibility: local servicePort: 8510 From c448b216e51cffce7a85ec28bb5449d6aa95df6e Mon Sep 17 00:00:00 2001 From: wangkx Date: Tue, 16 Aug 2022 08:42:16 -0400 Subject: [PATCH 15/21] HPCC-28118 Add ws_access.AccountPermissionsV2 The ws_access.AccountPermissionsV2 may be used to replace the existing ws_access.AccountPermissions. The existing ws_access.AccountPermissions has several duplicated calls to LDAP server. In this version, they are removed. 
Signed-off-by: wangkx --- esp/scm/ws_access.ecm | 18 +- esp/services/ws_access/ws_accessService.cpp | 472 ++++++++++++++++++++ esp/services/ws_access/ws_accessService.hpp | 2 + 3 files changed, 491 insertions(+), 1 deletion(-) diff --git a/esp/scm/ws_access.ecm b/esp/scm/ws_access.ecm index 0a801a8deb5..da34ed9dd5d 100644 --- a/esp/scm/ws_access.ecm +++ b/esp/scm/ws_access.ecm @@ -959,6 +959,21 @@ ESPresponse AccountPermissionsResponse [min_ver("1.03")] ESParray GroupPermissions; }; +ESPrequest AccountPermissionsV2Request +{ + string ResourceName; + string AccountName; + bool IsGroup; + bool IncludeGroup(false); +}; + +ESPresponse AccountPermissionsV2Response +{ + ESParray BasednNames; + ESParray Permissions; + ESParray GroupPermissions; +}; + ESPrequest [nil_remove] FilePermissionRequest { string FileName; @@ -991,7 +1006,7 @@ ESPresponse [nil_remove] UserAccountExportResponse [http_content("application/octet-stream")] binary Result; }; -ESPservice [version("1.16"), auth_feature("NONE"), exceptions_inline("./smc_xslt/exceptions.xslt")] ws_access +ESPservice [version("1.17"), auth_feature("NONE"), exceptions_inline("./smc_xslt/exceptions.xslt")] ws_access { ESPmethod [client_xslt("/esp/xslt/access_users.xslt")] Users(UserRequest, UserResponse); ESPmethod [client_xslt("/esp/xslt/access_useredit.xslt")] UserEdit(UserEditRequest, UserEditResponse); @@ -1034,6 +1049,7 @@ ESPservice [version("1.16"), auth_feature("NONE"), exceptions_inline("./smc_xslt ESPmethod [depr_ver("1.14"), client_xslt("/esp/xslt/access_permissionaddinput.xslt")] PermissionAddInput(PermissionAddRequest, PermissionAddResponse); ESPmethod [client_xslt("/esp/xslt/access_permissionchange.xslt")] PermissionAction(PermissionActionRequest, PermissionActionResponse); ESPmethod [client_xslt("/esp/xslt/access_accountpermissions.xslt")] AccountPermissions(AccountPermissionsRequest, AccountPermissionsResponse); + ESPmethod [min_ver("1.17")] AccountPermissionsV2(AccountPermissionsV2Request, 
AccountPermissionsV2Response); ESPmethod [client_xslt("/esp/xslt/access_filepermission.xslt")] FilePermission(FilePermissionRequest, FilePermissionResponse); ESPmethod [depr_ver("1.14"), client_xslt("/esp/xslt/access_permissionresetinput.xslt")] PermissionsResetInput(PermissionsResetInputRequest, PermissionsResetInputResponse); ESPmethod [client_xslt("/esp/xslt/access_permissionsreset.xslt")] PermissionsReset(PermissionsResetRequest, PermissionsResetResponse); diff --git a/esp/services/ws_access/ws_accessService.cpp b/esp/services/ws_access/ws_accessService.cpp index 32d9959a24a..7d3ad43b66d 100644 --- a/esp/services/ws_access/ws_accessService.cpp +++ b/esp/services/ws_access/ws_accessService.cpp @@ -37,6 +37,22 @@ #define MAX_RESOURCES_DISPLAY 3000 static const long MAXXLSTRANSFER = 5000000; +SecResourceType str2RType(const char* str) +{ + if (isEmptyString(str)) + return RT_DEFAULT; + else if (strieq(str, "module")) + return RT_MODULE; + else if (strieq(str, "service")) + return RT_SERVICE; + else if (strieq(str, "file")) + return RT_FILE_SCOPE; + else if (strieq(str, "workunit")) + return RT_WORKUNIT_SCOPE; + else + return RT_DEFAULT; +} + void Cws_accessEx::checkUser(IEspContext& context, CLdapSecManager* secmgr, const char* rtype, const char* rtitle, unsigned int SecAccessFlags) { if (secmgr == nullptr) @@ -4009,6 +4025,462 @@ bool Cws_accessEx::onAccountPermissions(IEspContext &context, IEspAccountPermiss return true; } +//List permissions for a given account in a given BaseDN resource or all BaseDN resources. +//Revised based on onAccountPermissions() which lists permissions for a given account in all BaseDN resources. 
+bool Cws_accessEx::onAccountPermissionsV2(IEspContext &context, IEspAccountPermissionsV2Request &req, + IEspAccountPermissionsV2Response &resp) +{ + class CAccountsInResource : public CInterface + { + StringAttr resourceName; + StringArray accountNames; + public: + CAccountsInResource(const char *_resourceName) : resourceName(_resourceName) {} + + inline StringArray &getAccountNames() { return accountNames; }; + inline void addUniqueAccountName(const char *name) { accountNames.appendUniq(name); }; + inline bool findAccountName(const char *name) { return accountNames.find(name) != NotFound; } + }; + + class CAccountsInBaseDN : public CInterface + { + StringAttr baseDNName; + CIArrayOf accountsInResources; + public: + CAccountsInBaseDN(const char *_baseDNName) : baseDNName(_baseDNName) {}; + + inline const char *getBaseDNName() { return baseDNName.get(); }; + inline CIArrayOf &getAccountsInResources() { return accountsInResources; }; + }; + + class CAccountPermissionsHelper : public CSimpleInterface + { + IEspContext *context = nullptr; + CLdapSecManager *secMGR = nullptr; + + StringBuffer accountNameReq; + StringAttr baseDNNameReq; + bool isGroupAccountReq = false; + bool includeGroup = false; + + StringArray groupsAccountBelongsTo; + StringAttr moduleBaseDN; //Used by appendAccountPermissionsForCodeGenResource() + CIArrayOf accountsInBaseDNs; //Used by setBaseDNNamesForMissingPermissions(). 
+ bool hasAuthUsersPerm = false; //May change in appendAccountPermission() + bool hasEveryonePerm = false; //May change in appendAccountPermission() + Owned authUsersGroupPermission, everyOneGroupPermission; + IArrayOf resourcesInOneBaseDN; + + bool getResourcePermissions(const char *baseDN, SecResourceType rType, + const char *resourceName, IArrayOf &permissions) + { + bool success = true; + try + { + secMGR->getPermissionsArray(baseDN, rType, resourceName, permissions); + } + catch(IException *e) //exception may be thrown when no permission for the resource + { + e->Release(); + success = false; + } + return success; + } + void readAccountPermissionsInOneBaseDN(IArrayOf &allBaseDNs, + IEspDnStruct &curBaseDN, IArrayOf &accountPermissions, + IArrayOf &groupAccountPermissions) + { + const char *baseDNName = curBaseDN.getName(); + const char *baseDN = curBaseDN.getBasedn(); + const char *rTypeStr = curBaseDN.getRtype(); + SecResourceType rType = str2RType(rTypeStr); + Owned accountsInBaseDN = new CAccountsInBaseDN(baseDNName); + + //Read the resources for the BaseDN Resource. + if (secMGR->getResources(rType, baseDN, resourcesInOneBaseDN)) + { + ForEachItemIn(i, resourcesInOneBaseDN) + { + ISecResource &r = resourcesInOneBaseDN.item(i); + const char *resourceName = r.getName(); + if (isEmptyString(resourceName)) + continue; + + //Use the same code as in onAccountPermissions() to skip some RT_MODULE resources. + //The permission codegenerator.cpp is saved as a service permission (not a module permission) + //when it is added for a user. 
+ if ((rType == RT_MODULE) && (strieq(resourceName, "codegenerator.cpp") || strnicmp(resourceName, "repository", 10))) + continue; + + IArrayOf permissions; + if (getResourcePermissions(baseDN, rType, resourceName, permissions)) //get the permissions for this resource using secMGR->getPermissionsArray() + { + checkAndAppendAccountPermissions(baseDNName, resourceName, permissions, accountPermissions, groupAccountPermissions); + appendAccountsInResources(resourceName, permissions, accountsInBaseDN->getAccountsInResources()); + } + } + }//If failed, log? + + if (rType == RT_WORKUNIT_SCOPE) + appendAccountPermissionsForWUScopeResource(baseDNName, baseDN, accountPermissions, groupAccountPermissions); + else if ((rType == RT_SERVICE) && !moduleBaseDN.isEmpty()) + appendAccountPermissionsForCodeGenResource(baseDNName, moduleBaseDN, accountPermissions, groupAccountPermissions); + + resourcesInOneBaseDN.kill(); //Clean it for possible next BaseDN. + accountsInBaseDNs.append(*accountsInBaseDN.getClear()); + } + void checkAndAppendAccountPermissions(const char *baseDNName, const char *resourceName, + IArrayOf &permissions, IArrayOf &accountPermissions, + IArrayOf &groupAccountPermissions) + { + ForEachItemIn(i, permissions) + { + CPermission &perm = permissions.item(i); + if (doesPermissionAccountMatchThisAccount(perm)) + { //The account in the perm matches with this account. The match means: 1. both accounts + //have the same account name; or 2. this account belongs to a group and the name of the + //group account is the same as the account in the perm. Create an IEspAccountPermission + //using the resourceName and the perm and add it to the permission group where the + //permission belongs to (accountPermissions, authUsersPermissions, etc). 
+ Owned newPermission = createNewAccountPermission(baseDNName, resourceName, perm); + appendAccountPermission(newPermission, perm, accountPermissions, groupAccountPermissions); + } + } + } + bool doesPermissionAccountMatchThisAccount(CPermission &perm) + { + int accountType = perm.getAccount_type(); + if (isGroupAccountReq && accountType == USER_ACT) + return false; //The account in the perm is not a group account. + + const char *actName = perm.getAccount_name(); + if (isEmptyString(actName)) + return false; + + //If the accountType matches with isGroupAccountReq, validate the actName. + if ((!isGroupAccountReq && (accountType == USER_ACT)) || (isGroupAccountReq && (accountType == GROUP_ACT))) + return streq(actName, accountNameReq); //The actName must match with the accountNameReq. + + //Now, there is only one possibility left: isGroupAccountReq = false and accountType = GROUP_ACT. + //isGroupAccountReq = false: the AccountPermissionsForResource call is for an individual account. + //accountType = GROUP_ACT: the perm is for a group account; actName is the group name. + //We need to check whether the individual is a member of this group. + return groupsAccountBelongsTo.find(actName) != NotFound; + } + IEspAccountPermission *createNewAccountPermission(const char *baseDNName, + const char *resourceName, CPermission &perm) + { + //Use the same code as in onAccountPermissions(). 
+ Owned permission = createAccountPermission(); + permission->setBasednName(baseDNName); + permission->setResourceName(resourceName); + + int allows = perm.getAllows(); + int denies = perm.getDenies(); + if((allows & NewSecAccess_Access) == NewSecAccess_Access) + permission->setAllow_access(true); + if((allows & NewSecAccess_Read) == NewSecAccess_Read) + permission->setAllow_read(true); + if((allows & NewSecAccess_Write) == NewSecAccess_Write) + permission->setAllow_write(true); + if((allows & NewSecAccess_Full) == NewSecAccess_Full) + permission->setAllow_full(true); + if((denies & NewSecAccess_Access) == NewSecAccess_Access) + permission->setDeny_access(true); + if((denies & NewSecAccess_Read) == NewSecAccess_Read) + permission->setDeny_read(true); + if((denies & NewSecAccess_Write) == NewSecAccess_Write) + permission->setDeny_write(true); + if((denies & NewSecAccess_Full) == NewSecAccess_Full) + permission->setDeny_full(true); + return permission.getClear(); + } + void appendAccountPermission(IEspAccountPermission *permissionToBeAppended, + CPermission &perm, IArrayOf &accountPermissions, + IArrayOf &groupAccountPermissions) + { + //Use similar logic as in onAccountPermissions(). + //Append the Account Permission (permissionToBeAppended) to accountPermissions, groupAccountPermissions, + //authUsersPermissions, or everyonePermissions. + const char *actName = perm.getAccount_name(); + int accountType = perm.getAccount_type(); + if ((!isGroupAccountReq && accountType == USER_ACT) || (isGroupAccountReq && accountType == GROUP_ACT)) + { + //Append the Account Permission to accountPermissions if: a. the requested account is not a group account + //and this perm is not for a group account; or b. 
the requested account is a group account and this perm is + //for a group account + accountPermissions.append(*LINK(permissionToBeAppended)); + return; + } + + if (streq(actName, "Authenticated Users")) + { + //Append the Account Permission to authUsersPermissions if this perm is for Authenticated Users. + IArrayOf& authUsersPermissions = authUsersGroupPermission->getPermissions(); + authUsersPermissions.append(*LINK(permissionToBeAppended)); + hasAuthUsersPerm = true; + return; + } + + if (streq(actName, "everyone")) + { + //Append the Account Permission to everyonePermissions if this perm is for everyone. + IArrayOf& everyonePermissions = everyOneGroupPermission->getPermissions(); + everyonePermissions.append(*LINK(permissionToBeAppended)); + hasEveryonePerm = true; + return; + } + + ForEachItemIn(i, groupAccountPermissions) + { + IEspGroupAccountPermission &groupPermission = groupAccountPermissions.item(i); + if (!streq(actName, groupPermission.getGroupName())) + continue; + + //This perm is for a group account which is already in the groupPermission. + //Append the Account Permission into the groupPermission. + IArrayOf &permissions = groupPermission.getPermissions(); + permissions.append(*LINK(permissionToBeAppended)); + return; + } + + //This perm is for a group account which is not in the groupAccountPermissions yet. + //Create a groupPermission. Append the Account Permission into the groupPermission. + //Append the groupPermission to the groupAccountPermissions. 
+ Owned groupPermission = createGroupAccountPermissionEx(actName); + IArrayOf &permissions = groupPermission->getPermissions(); + permissions.append(*LINK(permissionToBeAppended)); + groupAccountPermissions.append(*groupPermission.getLink()); + } + IEspGroupAccountPermission *createGroupAccountPermissionEx(const char *accountName) + { + Owned groupPermission = createGroupAccountPermission(); + groupPermission->setGroupName(accountName); + return groupPermission.getClear(); + } + void appendAccountPermissionsForWUScopeResource(const char *baseDNName, const char *baseDN, + IArrayOf &accountPermissions, + IArrayOf &groupAccountPermissions) + { + //Use the same code as in onAccountPermissions() to find out the deftBaseDN and deftName. + StringBuffer deftBaseDN, deftName; + const char *comma = strchr(baseDN, ','); + const char *eqsign = strchr(baseDN, '='); + if (eqsign != nullptr) + { + if(comma == nullptr) + deftName.append(eqsign + 1); + else + { + deftName.append(comma - eqsign - 1, eqsign + 1); + deftBaseDN.append(comma + 1); + } + } + + //Based on the code in LdapUtils::normalizeDn(), the deftBaseDN can be empty. + if (deftName.isEmpty()) + return; + + IArrayOf permissions; + if (getResourcePermissions(deftBaseDN, RT_WORKUNIT_SCOPE, deftName, permissions)) + checkAndAppendAccountPermissions(baseDNName, deftName, permissions, accountPermissions, groupAccountPermissions); + } + void getModuleBaseDN(IArrayOf &allBaseDNs, StringAttr &moduleBaseDN) + { + //Use the same code as in onAccountPermissions() to find out the moduleBaseDN. 
+ ForEachItemIn(i, allBaseDNs) + { + IEspDnStruct &dn = allBaseDNs.item(i); + const char *aName = dn.getName(); + const char *aBaseDN = dn.getBasedn(); + const char *aRType = dn.getRtype(); + const char *aRtitle = dn.getRtitle(); + if (!isEmptyString(aName) && !isEmptyString(aBaseDN) && !isEmptyString(aRtitle) && + !isEmptyString(aRType) && strieq(aRType, "module")) + { + moduleBaseDN.set(aBaseDN); + break; + } + } + } + void appendAccountPermissionsForCodeGenResource(const char *baseDNName, const char *moduleBaseDN, + IArrayOf &accountPermissions, IArrayOf &groupAccountPermissions) + { + IArrayOf permissions; + if (getResourcePermissions(moduleBaseDN, RT_SERVICE, "codegenerator.cpp", permissions)) + checkAndAppendAccountPermissions(baseDNName, "codegenerator.cpp", permissions, accountPermissions, groupAccountPermissions); + } + //Collect the names of the accounts which have permissions in the resources of a BaseDN. + void appendAccountsInResources(const char *resourceName, IArrayOf &permissions, + CIArrayOf &accountsInResources) + { + Owned accountsInResource = new CAccountsInResource(resourceName); + + ForEachItemIn(i, permissions) + { + CPermission &perm = permissions.item(i); + const char *accountName = perm.getAccount_name(); + int accountType = perm.getAccount_type(); + if (isEmptyString(accountName)) + continue; + + StringBuffer accountNameEx; + if (GROUP_ACT == accountType) + accountNameEx.append("G|"); + accountNameEx.append(accountName); + accountsInResource->addUniqueAccountName(accountNameEx); + } + accountsInResources.append(*accountsInResource.getClear()); + } + //Similar to onAccountPermissions(): + //For the account stored in the accountNameReq and related group accounts, loop + //through every resources in every BaseDNs. For each BaseDN, if the account is + //not set for one of its resources, add the BaseDN name to a BaseDN list of this + //account. A caller may use the list to enable the Add Permision functions for + //the BaseDN. 
+ void setBaseDNNamesForMissingPermissions(IEspAccountPermissionsV2Response &resp, + IArrayOf &groupAccountPermissions) + { + StringArray missingPermissionBasednNames; + getBaseDNNamesForAccountMissingPermissions(accountNameReq, isGroupAccountReq, missingPermissionBasednNames); + if (missingPermissionBasednNames.length() > 0) + resp.setBasednNames(missingPermissionBasednNames); + + ForEachItemIn(i, groupAccountPermissions) + { + IEspGroupAccountPermission &groupPermission = groupAccountPermissions.item(i); + + StringArray basednNames; + getBaseDNNamesForAccountMissingPermissions(groupPermission.getGroupName(), 1, basednNames); + if (basednNames.length() > 0) + groupPermission.setBasednNames(basednNames); + } + } + //For the account stored in the accountName, loop through every resources in every BaseDNs. + //For each BaseDN, if the account is not in one of its resources, add the BaseDN name to the basednNames. + void getBaseDNNamesForAccountMissingPermissions(const char *accountName, bool isGroup, + StringArray &basednNames) + { + StringBuffer accountNameEx; + if (isGroup) + accountNameEx.append("G|"); + accountNameEx.append(accountName); + + //There may be multiple accounts already in each BaseDN. + ForEachItemIn(i, accountsInBaseDNs) + { //for accounts in one BaseDN: + CAccountsInBaseDN &accountsInBaseDN = accountsInBaseDNs.item(i); + //One BaseDN may have multiple resources. + CIArrayOf &accountsInResources = accountsInBaseDN.getAccountsInResources(); + ForEachItemIn(k, accountsInResources) + { //for accounts in one resource winthin BaseDN: + CAccountsInResource &accountsInResource = accountsInResources.item(k); + if (!accountsInResource.findAccountName(accountNameEx)) + { + //Not find the account in this resource. Add the BaseDN name to the basednNames. 
+ basednNames.append(accountsInBaseDN.getBaseDNName()); + break; + } + } + } + } + + public: + CAccountPermissionsHelper(IEspContext *ctx, CLdapSecManager *secmgr) : context(ctx), secMGR(secmgr) { } + + void readReq(IEspAccountPermissionsV2Request &req, const char *accountReq, const char *userID) + { + baseDNNameReq.set(req.getResourceName()); + + isGroupAccountReq = req.getIsGroup(); + if (!isEmptyString(accountReq)) + accountNameReq.set(accountReq); + else + {//send back the permissions for the current user. + accountNameReq.set(userID); + isGroupAccountReq = false; + } + + includeGroup = req.getIncludeGroup(); + if (!isGroupAccountReq && includeGroup) + secMGR->getGroups(accountNameReq, groupsAccountBelongsTo); + groupsAccountBelongsTo.append("Authenticated Users"); + groupsAccountBelongsTo.append("everyone"); + } + + void getAccountPermissions(IArrayOf &allBaseDNs, IEspAccountPermissionsV2Response &resp) + { + //accountPermissions: the permissions for the requested account (accountNameReq). The account + //could be a group account or a personal account. + //groupAccountPermissions: the permissions for group accounts which are not in the accountPermissions, + //the authUsersPermissions and the everyonePermissions. + IArrayOf accountPermissions; + IArrayOf groupAccountPermissions; + + //"Authenticated Users" and "Everyone" are default user groups. Create the permission containers for those default groups. + //The permission containers for other groups are created in appendAccountPermission() when needed. + authUsersGroupPermission.setown(createGroupAccountPermissionEx("Authenticated Users")); + everyOneGroupPermission.setown(createGroupAccountPermissionEx("Everyone")); + + getModuleBaseDN(allBaseDNs, moduleBaseDN); + ForEachItemIn(i, allBaseDNs) + { + IEspDnStruct& curBaseDN = allBaseDNs.item(i); + if (baseDNNameReq.isEmpty()) //Get account permissions for all BaseDNs. 
+ readAccountPermissionsInOneBaseDN(allBaseDNs, curBaseDN, accountPermissions, groupAccountPermissions); + else if (strieq(curBaseDN.getName(), baseDNNameReq.get())) + { + readAccountPermissionsInOneBaseDN(allBaseDNs, curBaseDN, accountPermissions, groupAccountPermissions); + break; + } + } + + if (hasAuthUsersPerm) + groupAccountPermissions.append(*authUsersGroupPermission.getLink()); + + if (hasEveryonePerm) + groupAccountPermissions.append(*everyOneGroupPermission.getLink()); + + setBaseDNNamesForMissingPermissions(resp, groupAccountPermissions); + + if (groupAccountPermissions.length() > 0) + resp.setGroupPermissions(groupAccountPermissions); + + if (accountPermissions.length() > 0) + resp.setPermissions(accountPermissions); + } + }; + + try + { + CLdapSecManager *secMGR = queryLDAPSecurityManager(context); + if (!secMGR) + throw makeStringException(ECLWATCH_INVALID_SEC_MANAGER, MSG_SEC_MANAGER_IS_NULL); + + //Check user and access + StringBuffer userID; + context.getUserID(userID); + if (userID.isEmpty()) + throw makeStringException(ECLWATCH_INVALID_INPUT, "Could not get user ID."); + + const char *accountName = req.getAccountName(); + if (!isEmptyString(accountName) && !streq(accountName, userID.str())) + checkUser(context, secMGR); + + //Make sure BaseDN settings loaded + setBasedns(context); + + CAccountPermissionsHelper helper(&context, secMGR); + helper.readReq(req, accountName, userID); + helper.getAccountPermissions(m_basedns, resp); + } + catch(IException *e) + { + FORWARDEXCEPTION(context, e, ECLWATCH_INTERNAL_ERROR); + } + return true; +} + bool Cws_accessEx::onFilePermission(IEspContext &context, IEspFilePermissionRequest &req, IEspFilePermissionResponse &resp) { try diff --git a/esp/services/ws_access/ws_accessService.hpp b/esp/services/ws_access/ws_accessService.hpp index f67f826218c..7909259d10f 100644 --- a/esp/services/ws_access/ws_accessService.hpp +++ b/esp/services/ws_access/ws_accessService.hpp @@ -92,6 +92,7 @@ class Cws_accessEx : public 
Cws_access const char* getPasswordExpiration(ISecUser *usr, StringBuffer &passwordExpiration); void checkUser(IEspContext &context, CLdapSecManager *ldapSecMgr, const char *rtype = nullptr, const char *rtitle = nullptr, unsigned int SecAccessFlags = SecAccess_Full); CLdapSecManager* queryLDAPSecurityManagerAndCheckUser(IEspContext &context, const char *rtype = nullptr, const char *rtitle = nullptr, unsigned int SecAccessFlags = SecAccess_Full); + void createResourceArrayForResources(const char *baseDN, SecResourceType rType, IArrayOf &resources, IArrayOf &resourceArray); public: IMPLEMENT_IINTERFACE; @@ -144,6 +145,7 @@ class Cws_accessEx : public Cws_access virtual bool onUserSudoersInput(IEspContext &context, IEspUserSudoersInputRequest &req, IEspUserSudoersInputResponse &resp); virtual bool onUserSudoers(IEspContext &context, IEspUserSudoersRequest &req, IEspUserSudoersResponse &resp); virtual bool onAccountPermissions(IEspContext &context, IEspAccountPermissionsRequest &req, IEspAccountPermissionsResponse &resp); + virtual bool onAccountPermissionsV2(IEspContext &context, IEspAccountPermissionsV2Request &req, IEspAccountPermissionsV2Response &resp); virtual bool onFilePermission(IEspContext &context, IEspFilePermissionRequest &req, IEspFilePermissionResponse &resp); virtual bool onPermissionsResetInput(IEspContext &context, IEspPermissionsResetInputRequest &req, IEspPermissionsResetInputResponse &resp); virtual bool onPermissionsReset(IEspContext &context, IEspPermissionsResetRequest &req, IEspPermissionsResetResponse &resp); From f1cd36745eed7c016e36c3cac767e5b6fcf7e262 Mon Sep 17 00:00:00 2001 From: Michael Gardner Date: Mon, 9 Jan 2023 11:13:20 -0500 Subject: [PATCH 16/21] HPCC-28795 Fix dependencies for R-cran third party libraries Signed-off-by: Michael Gardner --- dockerfiles/vcpkg/build.sh | 10 +++++----- dockerfiles/vcpkg/centos-7.dockerfile | 16 ++++++++++++++-- dockerfiles/vcpkg/centos-8.dockerfile | 10 ++++++++-- 
dockerfiles/vcpkg/ubuntu-18.04.dockerfile | 9 +++++++-- dockerfiles/vcpkg/ubuntu-20.04.dockerfile | 13 +++++++++++-- dockerfiles/vcpkg/ubuntu-22.04.dockerfile | 9 +++++++-- 6 files changed, 52 insertions(+), 15 deletions(-) diff --git a/dockerfiles/vcpkg/build.sh b/dockerfiles/vcpkg/build.sh index 5f2caddc2ef..ca0a1291e04 100755 --- a/dockerfiles/vcpkg/build.sh +++ b/dockerfiles/vcpkg/build.sh @@ -28,20 +28,20 @@ function doBuild() { docker build --progress plain --pull --rm -f "$SCRIPT_DIR/$1.dockerfile" \ -t build-$1:$GITHUB_REF \ -t build-$1:latest \ + --build-arg DOCKER_NAMESPACE=$DOCKER_USERNAME \ --build-arg VCPKG_REF=$VCPKG_REF \ "$SCRIPT_DIR/." docker run --rm --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached build-$1:$GITHUB_REF \ - "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build-$1 \${CMAKE_OPTIONS}" - + "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build-$1 ${CMAKE_OPTIONS}" docker run --rm --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached build-$1:$GITHUB_REF \ - "cmake --build \${BUILD_FOLDER} --parallel $(nproc)" + "cmake --build /hpcc-dev/HPCC-Platform/build-$1 --parallel $(nproc)" # docker run -it --mount source="$(pwd)",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached build-ubuntu-22.04:latest bash } -CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON" -doBuild ubuntu-22.04 +CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" +doBuild ubuntu-22.04 doBuild ubuntu-20.04 doBuild ubuntu-18.04 doBuild centos-8 diff --git a/dockerfiles/vcpkg/centos-7.dockerfile b/dockerfiles/vcpkg/centos-7.dockerfile index b728a366d62..a9be88e3696 100644 --- a/dockerfiles/vcpkg/centos-7.dockerfile +++ 
b/dockerfiles/vcpkg/centos-7.dockerfile @@ -1,9 +1,21 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-centos-7:$VCPKG_REF +ARG DOCKER_NAMESPACE=hpccbuilds +FROM ${DOCKER_NAMESPACE}/vcpkg-centos-7:$VCPKG_REF RUN yum install -y \ java-11-openjdk-devel \ - python3-devel + python3-devel \ + wget \ + epel-release +RUN yum update -y && yum install -y R-core-devel + +ENV Rcpp_package=Rcpp_0.12.19.tar.gz +ENV RInside_package=RInside_0.2.12.tar.gz + +RUN wget https://cran.r-project.org/src/contrib/Archive/Rcpp/${Rcpp_package} +RUN wget https://cran.r-project.org/src/contrib/Archive/RInside/${RInside_package} +RUN R CMD INSTALL ${Rcpp_package} ${RInside_package} +RUN rm -f ${Rcpp_package} ${RInside_package} WORKDIR /hpcc-dev diff --git a/dockerfiles/vcpkg/centos-8.dockerfile b/dockerfiles/vcpkg/centos-8.dockerfile index d0c75fed4e9..9d67ebbf010 100644 --- a/dockerfiles/vcpkg/centos-8.dockerfile +++ b/dockerfiles/vcpkg/centos-8.dockerfile @@ -1,9 +1,15 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-centos-8:$VCPKG_REF +ARG DOCKER_NAMESPACE=hpccbuilds +FROM ${DOCKER_NAMESPACE}/vcpkg-centos-8:$VCPKG_REF RUN yum remove -y java-1.* && yum install -y \ java-11-openjdk-devel \ - python3-devel + python3-devel \ + epel-release +RUN yum install -y \ + R-core-devel \ + R-Rcpp-devel \ + R-RInside-devel WORKDIR /hpcc-dev diff --git a/dockerfiles/vcpkg/ubuntu-18.04.dockerfile b/dockerfiles/vcpkg/ubuntu-18.04.dockerfile index 4a53639bf13..fcc1d6a1eb5 100644 --- a/dockerfiles/vcpkg/ubuntu-18.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-18.04.dockerfile @@ -1,9 +1,14 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-ubuntu-18.04:$VCPKG_REF +ARG DOCKER_NAMESPACE=hpccbuilds +FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-18.04:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ - python3-dev + python3-dev \ + r-base \ + r-cran-rcpp \ + r-cran-rinside \ + r-cran-inline WORKDIR /hpcc-dev diff --git a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile 
b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile index e69381ea94e..2ef5570931e 100644 --- a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile @@ -1,9 +1,18 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-ubuntu-20.04:$VCPKG_REF +ARG DOCKER_NAMESPACE=hpccbuilds +FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-20.04:$VCPKG_REF + +ENV RInside_package=RInside_0.2.14.tar.gz RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ - python3-dev + python3-dev \ + wget \ + r-base \ + r-cran-rcpp +RUN wget https://cran.r-project.org/src/contrib/Archive/RInside/${RInside_package} +RUN R CMD INSTALL ${RInside_package} +RUN rm -f ${RInside_package} WORKDIR /hpcc-dev diff --git a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile index c3a0019edb4..86f79fdea88 100644 --- a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile @@ -1,9 +1,14 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-ubuntu-22.04:$VCPKG_REF +ARG DOCKER_NAMESPACE=hpccbuilds +FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-22.04:${VCPKG_REF} RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ - python3-dev + python3-dev \ + r-base \ + r-cran-rcpp \ + r-cran-rinside \ + r-cran-inline WORKDIR /hpcc-dev From b5bdb8b90977b34e7d7806f529d85aafcb507655 Mon Sep 17 00:00:00 2001 From: Richard Chapman Date: Mon, 23 Jan 2023 11:34:25 +0000 Subject: [PATCH 17/21] HPCC-28821 Build errors on latest Ubuntu compiler due to false-positive warning Signed-off-by: Richard Chapman --- tools/esdlcomp/CMakeLists.txt | 1 + tools/hidl/CMakeLists.txt | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/tools/esdlcomp/CMakeLists.txt b/tools/esdlcomp/CMakeLists.txt index bf855bec51a..a38fda7ba4c 100644 --- a/tools/esdlcomp/CMakeLists.txt +++ b/tools/esdlcomp/CMakeLists.txt @@ -52,6 +52,7 @@ if (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_CLANG) ADD_DEFINITIONS( -O0 ) 
set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/esdllex.cpp PROPERTIES COMPILE_FLAGS "-Wno-sign-compare -Wno-unused-function -Wno-unneeded-internal-declaration") set_source_files_properties(esdlcomp.cpp PROPERTIES COMPILE_FLAGS "-Wno-unused-function") + set_source_files_properties(esdlgram.cpp PROPERTIES COMPILE_FLAGS "-Wno-free-nonheap-object") endif () HPCC_ADD_LIBRARY ( esdlcomp SHARED ${SRCS} ) diff --git a/tools/hidl/CMakeLists.txt b/tools/hidl/CMakeLists.txt index 8843803e13e..07e52b208ea 100644 --- a/tools/hidl/CMakeLists.txt +++ b/tools/hidl/CMakeLists.txt @@ -51,6 +51,11 @@ include_directories ( set_source_files_properties (${CMAKE_CURRENT_BINARY_DIR}/hidlgram.cpp PROPERTIES COMPILE_FLAGS -fno-strict-aliasing) SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${STRICT_CXX_FLAGS}") + +if (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_CLANG) + set_source_files_properties(hidlgram.cpp PROPERTIES COMPILE_FLAGS "-Wno-free-nonheap-object") +endif () + ADD_DEFINITIONS( -D_CONSOLE ) HPCC_ADD_EXECUTABLE ( hidl ${SRCS} ) #install ( TARGETS hidl RUNTIME DESTINATION ${EXEC_DIR} ) From 793f5072d629cb1e49f095a5539ad19c0583b2da Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jan 2023 03:56:18 +0000 Subject: [PATCH 18/21] Bump json5 from 2.2.0 to 2.2.3 in /esp/src Bumps [json5](https://github.com/json5/json5) from 2.2.0 to 2.2.3. - [Release notes](https://github.com/json5/json5/releases) - [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md) - [Commits](https://github.com/json5/json5/compare/v2.2.0...v2.2.3) --- updated-dependencies: - dependency-name: json5 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- esp/src/package-lock.json | 32 +++++++------------------------- 1 file changed, 7 insertions(+), 25 deletions(-) diff --git a/esp/src/package-lock.json b/esp/src/package-lock.json index b61b8ba46ae..9514f3f82bd 100644 --- a/esp/src/package-lock.json +++ b/esp/src/package-lock.json @@ -5046,13 +5046,10 @@ "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "node_modules/json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", "dev": true, - "dependencies": { - "minimist": "^1.2.5" - }, "bin": { "json5": "lib/cli.js" }, @@ -5842,12 +5839,6 @@ "node": "*" } }, - "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, "node_modules/minipass": { "version": "3.1.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", @@ -12579,13 +12570,10 @@ "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, "json5": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", - "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", - "dev": true, - "requires": { - "minimist": "^1.2.5" - } + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true }, "jsonparse": { "version": "1.3.1", @@ -13215,12 +13203,6 @@ "brace-expansion": "^1.1.7" } 
}, - "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "dev": true - }, "minipass": { "version": "3.1.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", From 3792a50fce6bdcdfb42fc0fbe0201d8d4a8e9ec2 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Thu, 19 Jan 2023 10:01:27 +0000 Subject: [PATCH 19/21] HPCC-28793 Refactor docker base build tagging New format: hpccsystems/platform-build-base-centos-7:2022.11.14-rc5 Signed-off-by: Gordon Smith --- .github/workflows/build-assets.yml | 28 +++-- .github/workflows/build-containers-pr.yml | 2 +- .../build-containers-target-branch.yml | 2 +- dockerfiles/buildall-common.sh | 2 +- dockerfiles/platform-build-base/Dockerfile | 107 ------------------ dockerfiles/platform-build/Dockerfile | 2 +- dockerfiles/vcpkg/amazonlinux.dockerfile | 2 +- dockerfiles/vcpkg/build.sh | 6 +- dockerfiles/vcpkg/centos-7.dockerfile | 3 +- dockerfiles/vcpkg/centos-8.dockerfile | 3 +- dockerfiles/vcpkg/ubuntu-18.04.dockerfile | 3 +- dockerfiles/vcpkg/ubuntu-20.04.dockerfile | 3 +- dockerfiles/vcpkg/ubuntu-22.04.dockerfile | 3 +- dockerfiles/vcpkg/ubuntu-22.10.dockerfile | 2 +- vcpkg | 2 +- vcpkg.json | 4 +- 16 files changed, 36 insertions(+), 138 deletions(-) delete mode 100644 dockerfiles/platform-build-base/Dockerfile diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index 40a553fa45d..f4cd05a1b60 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -38,7 +38,6 @@ jobs: generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} - build-platform: name: Build Platform needs: release @@ -71,11 +70,20 @@ jobs: run: | echo 'mount_platform=source="${{ github.workspace }}",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached' >> $GITHUB_OUTPUT echo 
'mount_ln=source="${{ github.workspace }}/LN",target=/hpcc-dev/LN,type=bind,consistency=cached' >> $GITHUB_OUTPUT - echo "branch_name=$(echo ${{ github.ref }} | cut -d'/' -f3)" >> $GITHUB_OUTPUT + community_ref=${{ github.ref }} + echo "community_ref=$community_ref" >> $GITHUB_OUTPUT + echo "internal_ref=$(echo $community_ref | sed 's/community/internal/')" >> $GITHUB_OUTPUT + community_tag=$(echo $community_ref | cut -d'/' -f3) + echo "community_tag=$community_tag" >> $GITHUB_OUTPUT + echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT cd vcpkg echo "vcpkg_sha_short=$(git rev-parse --short=8 HEAD)" >> $GITHUB_OUTPUT echo "cmake_options=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF" >> $GITHUB_OUTPUT + - name: Print vars + run: | + echo "${{ toJSON(steps.vars.outputs) }})" + - name: Set up Docker Buildx id: buildx uses: docker/setup-buildx-action@v2 @@ -94,10 +102,10 @@ jobs: context: dockerfiles/vcpkg push: true tags: | - ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.branch_name }} + ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:latest cache-from: | - ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.branch_name }} + ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:latest build-args: | VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }} @@ -110,7 +118,7 @@ jobs: for plugin in "${plugins[@]}"; do sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.branch_name }} + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} docker run --rm --mount ${{ 
steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" @@ -122,11 +130,11 @@ jobs: run: | sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.branch_name }} + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build 
/hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - name: Upload Assets uses: ncipollo/release-action@v1.12.0 @@ -140,7 +148,7 @@ jobs: uses: actions/checkout@v3 with: repository: ${{ github.repository_owner }}/LN - ref: ${{ github.ref }} + ref: ${{ steps.vars.outputs.internal_ref }} path: ${{ github.workspace }}/LN token: ${{ secrets.LNB_TOKEN }} @@ -149,11 +157,11 @@ jobs: run: | sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.branch_name }} + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/HPCC-Platform/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/HPCC-Platform/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label 
"cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - name: Upload LN Assets if: ${{ matrix.ln }} diff --git a/.github/workflows/build-containers-pr.yml b/.github/workflows/build-containers-pr.yml index d28fcf7a552..e8ec908bb27 100644 --- a/.github/workflows/build-containers-pr.yml +++ b/.github/workflows/build-containers-pr.yml @@ -36,7 +36,7 @@ jobs: - name: vars id: vars run: | - echo ::set-output name=base_ver::8.6 + echo ::set-output name=base_ver::2022.11.14-rc5 # echo ::set-output name=container_registry::ghcr.io # echo ::set-output name=cr_user::${{ github.repository_owner }} echo ::set-output name=container_registry::docker.io diff --git a/.github/workflows/build-containers-target-branch.yml b/.github/workflows/build-containers-target-branch.yml index 46e5afc1820..6ed02b9117d 100644 --- a/.github/workflows/build-containers-target-branch.yml +++ b/.github/workflows/build-containers-target-branch.yml @@ -36,7 +36,7 @@ jobs: - name: vars id: vars run: | - echo ::set-output name=base_ver::8.6 + echo ::set-output name=base_ver::2022.11.14-rc5 # echo ::set-output name=container_registry::ghcr.io # echo ::set-output name=cr_user::${{ github.repository_owner }} echo ::set-output name=container_registry::docker.io diff --git a/dockerfiles/buildall-common.sh b/dockerfiles/buildall-common.sh index 42b3f9300ac..8a64695faf6 100755 --- a/dockerfiles/buildall-common.sh +++ b/dockerfiles/buildall-common.sh @@ -20,7 +20,7 @@ # Build script to create and publish Docker containers corresponding to a GitHub tag # This script is normally invoked via GitHub actions, whenever a new tag is pushed -BASE_VER=8.6 # The docker hub label for the platform-build-base image. Changes rarely. +BASE_VER=2022.11.14-rc5 # The docker hub label for the platform-build-base image. Changes rarely. 
BUILD_TAG=$(git describe --exact-match --tags || true) # The git tag for the images we are building BUILD_LABEL=${BUILD_TAG} # The docker hub label for all other components BUILD_USER=hpcc-systems # The github repo owner diff --git a/dockerfiles/platform-build-base/Dockerfile b/dockerfiles/platform-build-base/Dockerfile deleted file mode 100644 index 43a664fe4c8..00000000000 --- a/dockerfiles/platform-build-base/Dockerfile +++ /dev/null @@ -1,107 +0,0 @@ -############################################################################## -# -# HPCC SYSTEMS software Copyright (C) 2020 HPCC Systems®. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-############################################################################## - -# Build container image containing all 3rd-party tools required to build HPCC platform - -FROM ubuntu:20.04 as build -ENV DEBIAN_FRONTEND=noninteractive -ARG BASE_VER - -# This is version 8.6 of the platform-build-base, and anyone building/using it should have set that in BASE_VER -RUN [ "${BASE_VER}" = "8.6" ] - -RUN apt clean -y && \ - apt autoclean -y && \ - apt install -y -f && \ - apt autoremove -y && \ - apt-get update -y - -RUN apt-get install -y \ - automake \ - autotools-dev \ - binutils-dev \ - bison \ - build-essential \ - curl \ - default-jdk \ - default-libmysqlclient-dev \ - flex \ - libapr1-dev \ - libaprutil1-dev \ - libarchive-dev \ - libatlas-base-dev \ - libblas-dev \ - libboost-regex-dev \ - libcppunit-dev \ - libcurl4-openssl-dev \ - libevent-dev \ - libhiredis-dev \ - libiberty-dev \ - libicu-dev \ - libldap2-dev \ - libmemcached-dev \ - libnuma-dev \ - libsqlite3-dev \ - libssl-dev \ - libtbb-dev \ - libtool \ - libv8-dev \ - libxalan-c-dev \ - libxslt1-dev \ - pkg-config \ - python-dev \ - python3-dev \ - r-base-dev \ - r-cran-inline \ - r-cran-rcpp \ - r-cran-rinside \ - rsync \ - psmisc \ - libpopt0 \ - zlib1g-dev - -RUN apt-get install -y \ - bash-completion \ - expect \ - git \ - nano \ - valgrind \ - sudo \ - vim \ - gdb \ - software-properties-common \ - lsb-release \ - jq - -RUN curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - -RUN apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main" -RUN apt-get update -y -RUN apt-get install -y nodejs - -RUN mkdir /home/temp -WORKDIR /home/temp -RUN curl https://cmake.org/files/v3.16/cmake-3.16.3.tar.gz | tar xvz -WORKDIR /home/temp/cmake-3.16.3/ -RUN ./bootstrap -RUN make -j4 -RUN make install -WORKDIR / -RUN rm -rf /home/temp -RUN apt-get clean -y - -FROM ubuntu:20.04 -COPY --from=build / / diff --git a/dockerfiles/platform-build/Dockerfile 
b/dockerfiles/platform-build/Dockerfile index 8aba8d0cf29..75086d2bf12 100644 --- a/dockerfiles/platform-build/Dockerfile +++ b/dockerfiles/platform-build/Dockerfile @@ -17,7 +17,7 @@ # Base container image that builds all HPCC platform components -ARG BASE_VER=8.12-rc2 +ARG BASE_VER=2022.11.14-rc5 ARG CR_USER=hpccsystems ARG CR_REPO=docker.io ARG CR_CONTAINER_NAME=platform-build-base diff --git a/dockerfiles/vcpkg/amazonlinux.dockerfile b/dockerfiles/vcpkg/amazonlinux.dockerfile index ee40387a718..29034fa165a 100644 --- a/dockerfiles/vcpkg/amazonlinux.dockerfile +++ b/dockerfiles/vcpkg/amazonlinux.dockerfile @@ -1,5 +1,5 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-amazonlinux:$VCPKG_REF +FROM hpccsystems/platform-build-base-amazonlinux:$VCPKG_REF RUN amazon-linux-extras install java-openjdk11 && yum install -y \ java-11-openjdk-devel \ diff --git a/dockerfiles/vcpkg/build.sh b/dockerfiles/vcpkg/build.sh index ca0a1291e04..43303461e90 100755 --- a/dockerfiles/vcpkg/build.sh +++ b/dockerfiles/vcpkg/build.sh @@ -22,7 +22,7 @@ echo "VCPKG_REF: $VCPKG_REF" echo "DOCKER_USERNAME: $DOCKER_USERNAME" echo "DOCKER_PASSWORD: $DOCKER_PASSWORD" -docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD +# docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD function doBuild() { docker build --progress plain --pull --rm -f "$SCRIPT_DIR/$1.dockerfile" \ @@ -41,7 +41,9 @@ function doBuild() { } CMAKE_OPTIONS="-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF -DINCLUDE_PLUGINS=ON -DSUPPRESS_V8EMBED=ON" -doBuild ubuntu-22.04 + +doBuild amazonlinux +doBuild ubuntu-22.04 doBuild ubuntu-20.04 doBuild ubuntu-18.04 doBuild centos-8 diff --git a/dockerfiles/vcpkg/centos-7.dockerfile b/dockerfiles/vcpkg/centos-7.dockerfile index a9be88e3696..83791b07498 100644 --- a/dockerfiles/vcpkg/centos-7.dockerfile +++ b/dockerfiles/vcpkg/centos-7.dockerfile @@ -1,6 +1,5 @@ ARG VCPKG_REF=latest -ARG DOCKER_NAMESPACE=hpccbuilds -FROM 
${DOCKER_NAMESPACE}/vcpkg-centos-7:$VCPKG_REF +FROM hpccsystems/platform-build-base-centos-7:$VCPKG_REF RUN yum install -y \ java-11-openjdk-devel \ diff --git a/dockerfiles/vcpkg/centos-8.dockerfile b/dockerfiles/vcpkg/centos-8.dockerfile index 9d67ebbf010..1e22c7c3466 100644 --- a/dockerfiles/vcpkg/centos-8.dockerfile +++ b/dockerfiles/vcpkg/centos-8.dockerfile @@ -1,6 +1,5 @@ ARG VCPKG_REF=latest -ARG DOCKER_NAMESPACE=hpccbuilds -FROM ${DOCKER_NAMESPACE}/vcpkg-centos-8:$VCPKG_REF +FROM hpccsystems/platform-build-base-centos-8:$VCPKG_REF RUN yum remove -y java-1.* && yum install -y \ java-11-openjdk-devel \ diff --git a/dockerfiles/vcpkg/ubuntu-18.04.dockerfile b/dockerfiles/vcpkg/ubuntu-18.04.dockerfile index fcc1d6a1eb5..a9695dc1e2f 100644 --- a/dockerfiles/vcpkg/ubuntu-18.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-18.04.dockerfile @@ -1,6 +1,5 @@ ARG VCPKG_REF=latest -ARG DOCKER_NAMESPACE=hpccbuilds -FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-18.04:$VCPKG_REF +FROM hpccsystems/platform-build-base-ubuntu-18.04:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ diff --git a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile index 2ef5570931e..7211cccebde 100644 --- a/dockerfiles/vcpkg/ubuntu-20.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-20.04.dockerfile @@ -1,6 +1,5 @@ ARG VCPKG_REF=latest -ARG DOCKER_NAMESPACE=hpccbuilds -FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-20.04:$VCPKG_REF +FROM hpccsystems/platform-build-base-ubuntu-20.04:$VCPKG_REF ENV RInside_package=RInside_0.2.14.tar.gz diff --git a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile index 86f79fdea88..1b279204680 100644 --- a/dockerfiles/vcpkg/ubuntu-22.04.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-22.04.dockerfile @@ -1,6 +1,5 @@ ARG VCPKG_REF=latest -ARG DOCKER_NAMESPACE=hpccbuilds -FROM ${DOCKER_NAMESPACE}/vcpkg-ubuntu-22.04:${VCPKG_REF} +FROM 
hpccsystems/platform-build-base-ubuntu-22.04:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ diff --git a/dockerfiles/vcpkg/ubuntu-22.10.dockerfile b/dockerfiles/vcpkg/ubuntu-22.10.dockerfile index b558655ffd6..4d9bacc56c6 100644 --- a/dockerfiles/vcpkg/ubuntu-22.10.dockerfile +++ b/dockerfiles/vcpkg/ubuntu-22.10.dockerfile @@ -1,5 +1,5 @@ ARG VCPKG_REF=latest -FROM hpccbuilds/vcpkg-ubuntu-22.10:$VCPKG_REF +FROM hpccsystems/platform-build-base-ubuntu-22.10:$VCPKG_REF RUN apt-get update && apt-get install --no-install-recommends -y \ default-jdk \ diff --git a/vcpkg b/vcpkg index 348818aeb27..d6209d9f6c8 160000 --- a/vcpkg +++ b/vcpkg @@ -1 +1 @@ -Subproject commit 348818aeb27f6204e3abb185c11094c773f0066b +Subproject commit d6209d9f6c80a9bbbf195f3bf3fbdce72462887e diff --git a/vcpkg.json b/vcpkg.json index 0a64fdb9430..80a1a90c3b8 100644 --- a/vcpkg.json +++ b/vcpkg.json @@ -1,7 +1,7 @@ { "$schema": "https://raw.githubusercontent.com/microsoft/vcpkg/master/scripts/vcpkg.schema.json", "name": "hpcc-platform", - "version": "8.8.0", + "version": "8.12.0", "dependencies": [ "apr", "apr-util", @@ -100,4 +100,4 @@ }, "zlib" ] -} \ No newline at end of file +} From 2d9c4d962caba33504ef3d5ee43a2bf35311d3e0 Mon Sep 17 00:00:00 2001 From: Mark Kelly Date: Mon, 23 Jan 2023 18:20:35 -0500 Subject: [PATCH 20/21] HPCC-28828 Case insensitive query names (-i) Signed-off-by: Mark Kelly --- tools/roxie/extract-roxie-timings.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tools/roxie/extract-roxie-timings.py b/tools/roxie/extract-roxie-timings.py index 552fd528041..1841abb4011 100755 --- a/tools/roxie/extract-roxie-timings.py +++ b/tools/roxie/extract-roxie-timings.py @@ -85,10 +85,12 @@ def printRow(curRow): parser.add_argument("--all", "-a", help="Combine all services into a single result", action='store_true') parser.add_argument("--nosummary", "-n", help="Avoid including a summary", action='store_true') 
parser.add_argument("--summaryonly", "-s", help="Only generate a summary", action='store_true') + parser.add_argument("--ignorecase", "-i", help="Use case-insensitive query names", action='store_true') args = parser.parse_args() combineServices = args.all suppressDetails = args.summaryonly reportSummary = not args.nosummary or args.summaryonly + ignoreQueryCase = args.ignorecase csv.field_size_limit(0x100000) with open(args.filename, encoding='latin1') as csv_file: @@ -104,6 +106,9 @@ def printRow(curRow): mapping = rowText.split(); serviceName = completeMatch.group(1) + if ignoreQueryCase: + serviceName = serviceName.lower() + idMatch = idPattern.search(mapping[0]) if idMatch: if combineServices: From 9a47e7e75d000d18e35c278d0c143f5441876793 Mon Sep 17 00:00:00 2001 From: Gordon Smith Date: Tue, 24 Jan 2023 09:35:10 +0000 Subject: [PATCH 21/21] HPCC-28832 Refactor build_asset vars Add LN client tools building Signed-off-by: Gordon Smith --- .github/workflows/build-assets.yml | 113 +++++++++++++++------ dockerfiles/platform-build-base/Dockerfile | 109 ++++++++++++++++++++ 2 files changed, 189 insertions(+), 33 deletions(-) create mode 100644 dockerfiles/platform-build-base/Dockerfile diff --git a/.github/workflows/build-assets.yml b/.github/workflows/build-assets.yml index f4cd05a1b60..322e905fdde 100644 --- a/.github/workflows/build-assets.yml +++ b/.github/workflows/build-assets.yml @@ -23,10 +23,36 @@ jobs: runs-on: ubuntu-22.04 permissions: contents: write + outputs: + mount_platform: ${{ steps.vars.outputs.mount_platform }} + mount_ln: ${{ steps.vars.outputs.mount_ln }} + community_ref: ${{ steps.vars.outputs.community_ref }} + internal_ref: ${{ steps.vars.outputs.internal_ref }} + community_tag: ${{ steps.vars.outputs.community_tag }} + internal_tag: ${{ steps.vars.outputs.internal_tag }} + cmake_options: ${{ steps.vars.outputs.cmake_options }} steps: + - name: Calculate vars + id: vars + run: | + echo 'mount_platform=source="${{ github.workspace
}}",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached' >> $GITHUB_OUTPUT + echo 'mount_ln=source="${{ github.workspace }}/LN",target=/hpcc-dev/LN,type=bind,consistency=cached' >> $GITHUB_OUTPUT + community_ref=${{ github.ref }} + echo "community_ref=$community_ref" >> $GITHUB_OUTPUT + echo "internal_ref=$(echo $community_ref | sed 's/community/internal/')" >> $GITHUB_OUTPUT + community_tag=$(echo $community_ref | cut -d'/' -f3) + echo "community_tag=$community_tag" >> $GITHUB_OUTPUT + echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT + echo "cmake_options=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF" >> $GITHUB_OUTPUT + + - name: Print vars + run: | + echo "${{ toJSON(steps.vars.outputs) }})" + - name: Release HPCC-Platform uses: ncipollo/release-action@v1.12.0 with: + allowUpdates: true generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} - name: Release LN @@ -35,6 +61,8 @@ jobs: owner: ${{ secrets.LNB_ACTOR }} repo: LN token: ${{ secrets.LNB_TOKEN }} + tag: ${{ steps.vars.outputs.internal_tag }} + allowUpdates: true generateReleaseNotes: false prerelease: ${{ contains(github.ref, '-rc') }} @@ -68,20 +96,12 @@ jobs: - name: Calculate vars id: vars run: | - echo 'mount_platform=source="${{ github.workspace }}",target=/hpcc-dev/HPCC-Platform,type=bind,consistency=cached' >> $GITHUB_OUTPUT - echo 'mount_ln=source="${{ github.workspace }}/LN",target=/hpcc-dev/LN,type=bind,consistency=cached' >> $GITHUB_OUTPUT - community_ref=${{ github.ref }} - echo "community_ref=$community_ref" >> $GITHUB_OUTPUT - echo "internal_ref=$(echo $community_ref | sed 's/community/internal/')" >> $GITHUB_OUTPUT - community_tag=$(echo $community_ref | cut -d'/' -f3) - echo "community_tag=$community_tag" >> $GITHUB_OUTPUT - echo "internal_tag=$(echo $community_tag | sed 's/community/internal/')" >> $GITHUB_OUTPUT cd vcpkg echo "vcpkg_sha_short=$(git rev-parse 
--short=8 HEAD)" >> $GITHUB_OUTPUT - echo "cmake_options=-DCMAKE_BUILD_TYPE=RelWithDebInfo -DVCPKG_FILES_DIR=/hpcc-dev -DCPACK_THREADS=0 -DUSE_OPTIONAL=OFF" >> $GITHUB_OUTPUT - name: Print vars run: | + echo "${{ toJSON(needs.release.outputs) }})" echo "${{ toJSON(steps.vars.outputs) }})" - name: Set up Docker Buildx @@ -102,10 +122,10 @@ jobs: context: dockerfiles/vcpkg push: true tags: | - ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} + ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ needs.release.outputs.community_tag }} ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:latest cache-from: | - ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} + ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ needs.release.outputs.community_tag }} ${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:latest build-args: | VCPKG_REF=${{ steps.vars.outputs.vcpkg_sha_short }} @@ -118,11 +138,11 @@ jobs: for plugin in "${plugins[@]}"; do sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build 
/hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ needs.release.outputs.community_tag }} + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ needs.release.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=OFF" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ needs.release.outputs.cmake_options }} -D$plugin=ON -DCONTAINERIZED=OFF -DCPACK_STRIP_FILES=ON" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" done - name: CMake Containerized Packages @@ -130,11 +150,11 @@ jobs: run: | sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON 
-DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ needs.release.outputs.community_tag }} + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ needs.release.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake -S /hpcc-dev/HPCC-Platform -B /hpcc-dev/HPCC-Platform/build ${{ needs.release.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=ON -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - name: Upload Assets uses: ncipollo/release-action@v1.12.0 @@ -148,7 +168,7 @@ jobs: uses: actions/checkout@v3 with: repository: ${{ github.repository_owner }}/LN - ref: ${{ steps.vars.outputs.internal_ref }} + ref: ${{ needs.release.outputs.internal_ref }} path: ${{ github.workspace }}/LN token: ${{ secrets.LNB_TOKEN }} @@ -157,11 +177,11 @@ jobs: run: | sudo rm -f ./build/CMakeCache.txt sudo rm -rf ./build/CMakeFiles - docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ steps.vars.outputs.community_tag }} - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN 
-B /hpcc-dev/HPCC-Platform/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/HPCC-Platform/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ steps.vars.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" - docker run --rm --mount ${{ steps.vars.outputs.mount_platform }} --mount ${{ steps.vars.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker_label=${{ secrets.DOCKER_USERNAME }}/build-${{ matrix.os }}:${{ needs.release.outputs.community_tag }} + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} --mount ${{ needs.release.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/HPCC-Platform/build -DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.release.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=OFF" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} --mount ${{ needs.release.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} --mount ${{ needs.release.outputs.mount_ln }} $docker_label "cmake -S /hpcc-dev/LN -B /hpcc-dev/HPCC-Platform/build 
-DHPCC_SOURCE_DIR=/hpcc-dev/HPCC-Platform ${{ needs.release.outputs.cmake_options }} -DINCLUDE_PLUGINS=ON -DCONTAINERIZED=OFF -DSUPPRESS_REMBED=ON -DSUPPRESS_V8EMBED=ON -DSUPPRESS_SPARK=ON -DCPACK_STRIP_FILES=ON" + docker run --rm --mount ${{ needs.release.outputs.mount_platform }} --mount ${{ needs.release.outputs.mount_ln }} $docker_label "cmake --build /hpcc-dev/HPCC-Platform/build --parallel $(nproc) --target package" - name: Upload LN Assets if: ${{ matrix.ln }} @@ -209,11 +229,10 @@ jobs: echo ${{ matrix.os }} ${{ matrix.triplet }} echo "Checkout to $Env:GITHUB_WORKSPACE" - - name: Calculate vars - id: vars + - name: Print vars shell: "bash" run: | - echo "branch_name=$(echo ${{ github.ref }} | cut -d'/' -f3)" >> $GITHUB_OUTPUT + echo "${{ toJSON(needs.release.outputs) }})" - name: OSX Dependencies if: ${{ contains(matrix.os, 'macos') }} @@ -250,22 +269,50 @@ jobs: run: | ./vcpkg/bootstrap-vcpkg.sh - - name: "Generate HPCC-Platform Build Files" + - name: CMake Packages working-directory: . shell: "bash" run: | mkdir build cd build cmake .. ${{ matrix.cmake_config_options }} + cmake --build . ${{ matrix.cmake_build_options }} --target package - - name: Bundle + - name: Upload Assets + uses: ncipollo/release-action@v1.12.0 + with: + allowUpdates: true + artifacts: "build/*.exe,build/*.msi,build/*.dmg,build/*.pkg,build/*.tar.gz" + + - name: Checkout LN + uses: actions/checkout@v3 + with: + repository: ${{ github.repository_owner }}/LN + ref: ${{ needs.release.outputs.internal_ref }} + path: ${{ github.workspace }}/LN + token: ${{ secrets.LNB_TOKEN }} + + - name: CMake LN Packages working-directory: ./build shell: "bash" run: | + ${{ matrix.sudo }} rm -f ./CMakeCache.txt + ${{ matrix.sudo }} rm -rf ./CMakeFiles + cmake -S ../LN ${{ matrix.cmake_config_options }} -DHPCC_SOURCE_DIR=.. cmake --build . 
${{ matrix.cmake_build_options }} --target package - - name: Upload Assets + - name: Upload LN Assets uses: ncipollo/release-action@v1.12.0 with: + owner: ${{ secrets.LNB_ACTOR }} + repo: LN + token: ${{ secrets.LNB_TOKEN }} allowUpdates: true artifacts: "build/*.exe,build/*.msi,build/*.dmg,build/*.pkg,build/*.tar.gz" + + - name: Upload error logs + if: ${{ failure() || cancelled() }} + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.os }}-${{ matrix.package }}-logs + path: build/CMakeCache.txt diff --git a/dockerfiles/platform-build-base/Dockerfile b/dockerfiles/platform-build-base/Dockerfile new file mode 100644 index 00000000000..2ff69fe7284 --- /dev/null +++ b/dockerfiles/platform-build-base/Dockerfile @@ -0,0 +1,109 @@ +############################################################################## +# +# HPCC SYSTEMS software Copyright (C) 2020 HPCC Systems®. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+############################################################################## + +# Build container image containing all 3rd-party tools required to build HPCC platform + +# DEPRECATED --- DEPRECATED --- DEPRECATED + +FROM ubuntu:20.04 as build +ENV DEBIAN_FRONTEND=noninteractive +ARG BASE_VER + +# This is version 8.6 of the platform-build-base, and anyone building/using it should have set that in BASE_VER +RUN [ "${BASE_VER}" = "8.6" ] + +RUN apt clean -y && \ + apt autoclean -y && \ + apt install -y -f && \ + apt autoremove -y && \ + apt-get update -y + +RUN apt-get install -y \ + automake \ + autotools-dev \ + binutils-dev \ + bison \ + build-essential \ + curl \ + default-jdk \ + default-libmysqlclient-dev \ + flex \ + libapr1-dev \ + libaprutil1-dev \ + libarchive-dev \ + libatlas-base-dev \ + libblas-dev \ + libboost-regex-dev \ + libcppunit-dev \ + libcurl4-openssl-dev \ + libevent-dev \ + libhiredis-dev \ + libiberty-dev \ + libicu-dev \ + libldap2-dev \ + libmemcached-dev \ + libnuma-dev \ + libsqlite3-dev \ + libssl-dev \ + libtbb-dev \ + libtool \ + libv8-dev \ + libxalan-c-dev \ + libxslt1-dev \ + pkg-config \ + python-dev \ + python3-dev \ + r-base-dev \ + r-cran-inline \ + r-cran-rcpp \ + r-cran-rinside \ + rsync \ + psmisc \ + libpopt0 \ + zlib1g-dev + +RUN apt-get install -y \ + bash-completion \ + expect \ + git \ + nano \ + valgrind \ + sudo \ + vim \ + gdb \ + software-properties-common \ + lsb-release \ + jq + +RUN curl https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - +RUN apt-add-repository "deb https://deb.nodesource.com/node_16.x $(lsb_release -sc) main" +RUN apt-get update -y +RUN apt-get install -y nodejs + +RUN mkdir /home/temp +WORKDIR /home/temp +RUN curl https://cmake.org/files/v3.16/cmake-3.16.3.tar.gz | tar xvz +WORKDIR /home/temp/cmake-3.16.3/ +RUN ./bootstrap +RUN make -j4 +RUN make install +WORKDIR / +RUN rm -rf /home/temp +RUN apt-get clean -y + +FROM ubuntu:20.04 +COPY --from=build / /