diff --git a/.automation/build.py b/.automation/build.py
index 0db61cdd231..801457402e1 100644
--- a/.automation/build.py
+++ b/.automation/build.py
@@ -327,7 +327,11 @@ def build_dockerfile(
)
docker_from += [dockerfile_item]
# ARG
- elif dockerfile_item.startswith("ARG"):
+ elif dockerfile_item.startswith("ARG") or (
+ len(dockerfile_item.splitlines()) > 1
+ and dockerfile_item.splitlines()[0].startswith("# renovate: ")
+ and dockerfile_item.splitlines()[1].startswith("ARG")
+ ):
docker_arg += [dockerfile_item]
# COPY
elif dockerfile_item.startswith("COPY"):
@@ -423,7 +427,10 @@ def build_dockerfile(
docker_arg_top = []
docker_arg_main = []
for docker_arg_item in docker_arg:
- match = re.match(r"ARG\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=?\s*", docker_arg_item)
+ match = re.match(
+ r"(?:# renovate: .*\n)?ARG\s+([a-zA-Z_][a-zA-Z0-9_]*)\s*=?\s*",
+ docker_arg_item,
+ )
arg_name = match.group(1)
if arg_name in all_from_instructions:
docker_arg_top += [docker_arg_item]
@@ -523,7 +530,7 @@ def build_dockerfile(
+ ' && chown -R "$(id -u)":"$(id -g)" node_modules # fix for https://github.com/npm/cli/issues/5900 \\\n'
+ ' && echo "Removing extra node_module files…" \\\n'
+ ' && find . \\( -not -path "/proc" \\)'
- + ' -and \\( -type f'
+ + " -and \\( -type f"
+ ' \\( -iname "*.d.ts"'
+ ' -o -iname "*.map"'
+ ' -o -iname "*.npmignore"'
@@ -532,7 +539,7 @@ def build_dockerfile(
+ ' -o -iname "README.md"'
+ ' -o -iname ".package-lock.json"'
+ ' -o -iname "package-lock.json"'
- + ' \\) -o -type d -name /root/.npm/_cacache \\) -delete \n'
+ + " \\) -o -type d -name /root/.npm/_cacache \\) -delete \n"
+ "WORKDIR /\n"
)
replace_in_file(dockerfile, "#NPM__START", "#NPM__END", npm_install_command)
@@ -1111,7 +1118,7 @@ def generate_flavor_documentation(flavor_id, flavor, linters_tables_md):
def dump_as_json(value: Any, empty_value: str) -> str:
if not value:
return empty_value
- # Covert any value to string with JSON
+ # Convert any value to string with JSON
# Don't indent since markdown table supports single line only
result = json.dumps(value, indent=None, sort_keys=True)
return f"`{result}`"
@@ -2350,7 +2357,7 @@ def add_in_config_schema_file(variables):
json_schema["properties"] = json_schema_props
if updated is True:
with open(CONFIG_JSON_SCHEMA, "w", encoding="utf-8") as outfile:
- json.dump(json_schema, outfile, indent=4, sort_keys=True)
+ json.dump(json_schema, outfile, indent=2, sort_keys=True)
outfile.write("\n")
@@ -2367,7 +2374,7 @@ def remove_in_config_schema_file(variables):
json_schema["properties"] = json_schema_props
if updated is True:
with open(CONFIG_JSON_SCHEMA, "w", encoding="utf-8") as outfile:
- json.dump(json_schema, outfile, indent=4, sort_keys=True)
+ json.dump(json_schema, outfile, indent=2, sort_keys=True)
outfile.write("\n")
@@ -2707,7 +2714,7 @@ def generate_json_schema_enums():
with open(DESCRIPTOR_JSON_SCHEMA, "r", encoding="utf-8") as json_file:
json_schema = json.load(json_file)
json_schema["definitions"]["enum_flavors"]["enum"] = ["all_flavors"] + list(
- flavors.keys()
+ sorted(set(list(flavors.keys())))
)
with open(DESCRIPTOR_JSON_SCHEMA, "w", encoding="utf-8") as outfile:
json.dump(json_schema, outfile, indent=2, sort_keys=True)
@@ -2724,6 +2731,14 @@ def generate_json_schema_enums():
json_schema["definitions"]["enum_linter_keys"]["enum"] = [x.name for x in linters]
# Deprecated linters
json_schema["definitions"]["enum_linter_keys"]["enum"] += DEPRECATED_LINTERS
+
+ # Sort:
+ json_schema["definitions"]["enum_descriptor_keys"]["enum"] = sorted(
+ set(json_schema["definitions"]["enum_descriptor_keys"]["enum"])
+ )
+ json_schema["definitions"]["enum_linter_keys"]["enum"] = sorted(
+ set(json_schema["definitions"]["enum_linter_keys"]["enum"])
+ )
with open(CONFIG_JSON_SCHEMA, "w", encoding="utf-8") as outfile:
json.dump(json_schema, outfile, indent=2, sort_keys=True)
outfile.write("\n")
@@ -3352,17 +3367,22 @@ def update_workflow_linters(file_path, linters):
if __name__ == "__main__":
+ logging_format = (
+ "[%(levelname)s] %(message)s"
+ if "CI" in os.environ
+ else "%(asctime)s [%(levelname)s] %(message)s"
+ )
try:
logging.basicConfig(
force=True,
level=logging.INFO,
- format="%(asctime)s [%(levelname)s] %(message)s",
+ format=logging_format,
handlers=[logging.StreamHandler(sys.stdout)],
)
except ValueError:
logging.basicConfig(
level=logging.INFO,
- format="%(asctime)s [%(levelname)s] %(message)s",
+ format=logging_format,
handlers=[logging.StreamHandler(sys.stdout)],
)
config.init_config("build")
diff --git a/.automation/generated/linter-helps.json b/.automation/generated/linter-helps.json
index fc564c98402..6ca62c4696a 100644
--- a/.automation/generated/linter-helps.json
+++ b/.automation/generated/linter-helps.json
@@ -25,7 +25,7 @@
"",
"Documents:",
"",
- " https://github.com/rhysd/actionlint/tree/main/docs",
+ " https://github.com/rhysd/actionlint/tree/v1.7.1/docs",
"",
"Flags:",
" -color",
@@ -314,7 +314,7 @@
" B703 django_mark_safe"
],
"bash-exec": [
- "GNU bash, version 5.2.21(1)-release-(x86_64-alpine-linux-musl)",
+ "GNU bash, version 5.2.26(1)-release-(x86_64-alpine-linux-musl)",
"Usage: bash [GNU long option] [option] ...",
" bash [GNU long option] [option] script-file ...",
"GNU long options:",
@@ -345,7 +345,7 @@
"General help using GNU software: "
],
"bicep_linter": [
- "Bicep CLI version 0.27.1 (4b41cb6d4b)",
+ "Bicep CLI version 0.28.1 (ba1e9f8c1e)",
"",
"Usage:",
" bicep build [options] <file>",
@@ -2342,7 +2342,7 @@
" --builtins BUILTINS define more built-ins, comma separated",
" --doctests also check syntax of the doctests",
"",
- "Installed plugins: mccabe: 0.7.0, pycodestyle: 2.11.1, pyflakes: 3.2.0"
+ "Installed plugins: mccabe: 0.7.0, pycodestyle: 2.12.0, pyflakes: 3.2.0"
],
"gherkin-lint": [
"Usage: gherkin-lint [options] ",
@@ -2525,6 +2525,7 @@
"",
"Available Commands:",
" completion Generate a shell completion for Grype (listing local docker images)",
+ " config show the grype configuration",
" db vulnerability database operations",
" explain Ask grype to explain a set of findings",
" help Help about any command",
@@ -3143,7 +3144,7 @@
" comma to use (Default is time,console)",
" -o, --output [string] reporters to use (Default is ./report/)",
" -m, --mode [string] mode of quality of search, can be \"strict\", \"mild\" and \"weak\" (Default is \"function mild(token) {",
- " return strict(token) && token.type !== 'empty' && token.type !== 'new_line';",
+ " return strict(token) && token.type !== \"empty\" && token.type !== \"new_line\";",
" }\")",
" -f, --format [string] format or formats separated by comma (Example",
" php,javascript,python)",
@@ -3463,49 +3464,50 @@
" --version version for kubeval"
],
"lightning-flow-scanner": [
- "Try to resolve the errors in the following flows:",
+ "(node:1756) [DEP0040] DeprecationWarning: The `punycode` module is deprecated. Please use a userland alternative instead.",
+ "(Use `node --trace-deprecation ...` to show where the warning was created)",
+ "Find and fix potential bugs in Salesforce flows.",
"",
"USAGE",
- " $ sf flow scan [-d ] [-c ] [-f",
- " error|warning|note|never] [-r] [-p ] [-u ] [--apiversion",
- " ] [--json] [--loglevel",
- " trace|debug|info|warn|error|fatal|TRACE|DEBUG|INFO|WARN|ERROR|FATAL]",
+ " $ sf flow scan [--json] [--flags-dir ] [-d ] [-c",
+ " ] [-f error|warning|note|never] [-r] [-p ] [-u ]",
"",
"FLAGS",
- " -c, --config=",
- " Path to configuration file",
- "",
- " -d, --directory=",
- " Directory to scan for flows",
- "",
- " -f, --failon=(error|warning|note|never)",
- " [default: error] Thresold failure level (error, warning, note, or never)",
- " defining when the command return code will be 1",
- "",
- " -p, --sourcepath=",
- " comma-separated list of source flow paths to scan",
- "",
- " -r, --retrieve",
- " Force retrieve Flows from org at the start of the command",
- "",
- " -u, --targetusername=",
- " username or alias for the target org; overrides default target org",
- "",
- " --apiversion=",
- " override the api version used for api requests made by this command",
+ " -c, --config= Path to configuration file",
+ " -d, --directory= Directory to scan for flows",
+ " -f, --failon=