From 3e31c3e7b17dc23a638434224147803ea7a09341 Mon Sep 17 00:00:00 2001 From: Rob Rudin Date: Mon, 10 Jun 2024 09:43:48 -0400 Subject: [PATCH] MLE-14507 Renamed options to use --lower-hyphenated No functional changes, just a bunch of search/replace. Added a test - `VerifyOptionNamesTest` - that ensures there are no option names containing uppercase characters. --- CONTRIBUTING.md | 34 ++++---- docs/common-options.md | 58 ++++++------- docs/copy.md | 14 +-- docs/export/export-archives.md | 8 +- docs/export/export-documents.md | 20 ++--- docs/export/export-rdf.md | 4 +- docs/export/export-rows.md | 16 ++-- docs/export/specifying-path.md | 4 +- docs/getting-started.md | 56 ++++++------ docs/import/common-import-features.md | 28 +++--- docs/import/import-files/aggregate-xml.md | 10 +-- docs/import/import-files/archives.md | 4 +- docs/import/import-files/avro.md | 8 +- docs/import/import-files/delimited-text.md | 8 +- docs/import/import-files/handling-errors.md | 6 +- docs/import/import-files/json.md | 8 +- docs/import/import-files/orc.md | 8 +- docs/import/import-files/parquet.md | 8 +- docs/import/import-files/rdf.md | 6 +- docs/import/import-files/regular-files.md | 8 +- docs/import/import-files/selecting-files.md | 6 +- docs/import/import-jdbc.md | 18 ++-- docs/import/tuning-performance.md | 10 +-- docs/reprocess.md | 62 ++++++------- mlcp-testing/build.gradle | 40 ++++----- .../marklogic/newtool/impl/CommonParams.java | 4 +- .../newtool/impl/ConnectionInputs.java | 2 +- .../newtool/impl/ConnectionParams.java | 64 +++++++------- .../impl/ConnectionParamsValidator.java | 12 +-- .../marklogic/newtool/impl/JdbcParams.java | 8 +- .../com/marklogic/newtool/impl/S3Params.java | 4 +- .../newtool/impl/copy/CopyCommand.java | 32 +++---- .../impl/copy/OutputConnectionParams.java | 72 ++++++++-------- .../impl/export/ExportFilesCommand.java | 4 +- .../impl/export/ExportRdfFilesCommand.java | 12 +-- .../impl/export/ReadDocumentParams.java | 14 +-- .../newtool/impl/export/ReadRowsParams.java | 4 +- .../newtool/impl/export/WriteFilesParams.java | 2 +- .../importdata/ImportAggregateXmlCommand.java | 4 +- .../impl/importdata/ImportFilesCommand.java | 2 +- .../impl/importdata/ImportJdbcCommand.java | 4 +- .../importdata/ImportJsonFilesCommand.java | 2 +- .../importdata/ImportRdfFilesCommand.java | 2 +- .../impl/importdata/ReadFilesParams.java | 4 +- .../impl/importdata/WriteDocumentParams.java | 26 +++--- .../WriteStructuredDocumentParams.java | 8 +- .../impl/reprocess/ReprocessCommand.java | 58 ++++++------- .../marklogic-spark-messages_en.properties | 27 +++--- .../marklogic/newtool/api/ConnectionTest.java | 2 +- .../newtool/api/ParquetFilesExporterTest.java | 2 +- .../impl/ConfigureSparkMasterUrlTest.java | 4 +- .../newtool/impl/ConnectionParamsTest.java | 40 ++++----- .../newtool/impl/ErrorMessagesTest.java | 26 +++--- .../newtool/impl/ExportRdfFilesTest.java | 12 +-- .../newtool/impl/HandleErrorTest.java | 16 ++-- .../com/marklogic/newtool/impl/LimitTest.java | 2 +- .../impl/ValidateMarkLogicConnectionTest.java | 12 +-- .../newtool/impl/VerifyOptionNamesTest.java | 79 +++++++++++++++++ .../newtool/impl/copy/CopyOptionsTest.java | 86 +++++++++---------- .../marklogic/newtool/impl/copy/CopyTest.java | 62 ++++++------- .../custom/CustomExportDocumentsTest.java | 2 +- .../impl/custom/CustomExportRowsTest.java | 2 +- .../newtool/impl/custom/CustomImportTest.java | 22 ++--- .../impl/export/ExportArchiveFilesTest.java | 14 +-- .../impl/export/ExportAvroFilesTest.java | 6 +- 
.../ExportDelimitedFilesCommandTest.java | 8 +- .../impl/export/ExportFilesOptionsTest.java | 6 +- .../newtool/impl/export/ExportFilesTest.java | 22 ++--- .../newtool/impl/export/ExportJdbcTest.java | 12 +-- .../impl/export/ExportJsonLinesFilesTest.java | 10 +-- .../impl/export/ExportOrcFilesTest.java | 6 +- .../impl/export/ExportParquetFilesTest.java | 12 +-- .../export/ExportRdfFilesOptionsTest.java | 10 +-- .../ImportAggregateXmlFilesTest.java | 40 ++++----- .../importdata/ImportArchiveFilesTest.java | 14 +-- .../impl/importdata/ImportAvroFilesTest.java | 22 ++--- .../importdata/ImportDelimitedFilesTest.java | 36 ++++---- .../importdata/ImportFilesOptionsTest.java | 24 +++--- .../impl/importdata/ImportFilesTest.java | 56 ++++++------ .../impl/importdata/ImportFromS3Test.java | 4 +- .../impl/importdata/ImportJdbcTest.java | 42 ++++----- .../ImportJdbcWithAggregatesTest.java | 30 +++---- .../impl/importdata/ImportJsonFilesTest.java | 42 ++++----- .../ImportMlcpArchiveFilesTest.java | 10 +-- .../impl/importdata/ImportOrcFilesTest.java | 24 +++--- .../importdata/ImportParquetFilesTest.java | 26 +++--- .../impl/importdata/ImportRdfFilesTest.java | 20 ++--- .../impl/importdata/ImportRowsAsXmlTest.java | 48 +++++------ .../impl/reprocess/ReprocessOptionsTest.java | 86 +++++++++---------- .../newtool/impl/reprocess/ReprocessTest.java | 38 ++++---- .../newtool/junit5/TwoWaySslConfigurer.java | 2 +- 91 files changed, 971 insertions(+), 891 deletions(-) create mode 100644 new-tool-cli/src/test/java/com/marklogic/newtool/impl/VerifyOptionNamesTest.java diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e42ce279..71b1f919 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -140,32 +140,32 @@ You can cause a failure with MarkLogic that caused the command to stop: ``` ./nt/bin/nt import-files --path "new-tool-cli/src/test/resources/mixed-files/*" \ - --connectionString "new-tool-user:password@localhost:8000" \ + --connection-string "new-tool-user:password@localhost:8000" \ --repartition 1 \ - --abortOnWriteFailure \ + --abort-on-write-failure \ --permissions "invalid-role,read,new-tool-role,update" \ - --uriReplace ".*/mixed-files,'/test'" + --uri-replace ".*/mixed-files,'/test'" ``` You can cause a failure and ask to see the full stacktrace (often noisy and not helpful): ``` ./nt/bin/nt import-files --path "new-tool-cli/src/test/resources/mixed-files/*" \ - --connectionString "new-tool-user:password@localhost:8000" \ + --connection-string "new-tool-user:password@localhost:8000" \ --repartition 1 \ --permissions "invalid-role,read,new-tool-role,update" \ - --uriReplace ".*/mixed-files,'/test'" \ - --abortOnWriteFailure \ + --uri-replace ".*/mixed-files,'/test'" \ + --abort-on-write-failure \ --stacktrace ``` -You can cause a failure and tell the command to keep executing by not including `--abortOnWriteFailure`: +You can cause a failure and tell the command to keep executing by not including `--abort-on-write-failure`: ``` ./nt/bin/nt import-files --path "new-tool-cli/src/test/resources/mixed-files/*" \ - --connectionString "new-tool-user:password@localhost:8000" \ + --connection-string "new-tool-user:password@localhost:8000" \ --permissions "invalid-role,read,new-tool-role,update" \ - --uriReplace ".*/mixed-files,'/test'" + --uri-replace ".*/mixed-files,'/test'" ``` ## Testing with a load balancer @@ -181,13 +181,13 @@ owned by the performance team. 
Feel free to adjust this config locally as needed Example of using the existing config to copy from port 8015 to port 8016 in the performance cluster: ``` -./nt/bin/nt copy --connectionString "admin:admin@localhost:8006" \ +./nt/bin/nt copy --connection-string "admin:admin@localhost:8006" \ --collections "address_small" \ - --batchSize 500 \ + --batch-size 500 \ --limit 10000 \ --categories content,metadata \ - --outputConnectionString "admin:admin@localhost:8007" \ - --outputThreadCount 3 --partitionsPerForest 1 --outputBatchSize 200 + --output-connection-string "admin:admin@localhost:8007" \ + --output-thread-count 3 --partitions-per-forest 1 --output-batch-size 200 ``` ## Testing against a separate Spark cluster @@ -239,7 +239,7 @@ cluster: ``` $SPARK_HOME/bin/spark-submit --class com.marklogic.newtool.cli.Submit \ --master spark://NYWHYC3G0W:7077 new-tool-cli/build/libs/new-tool-cli-0.2.0-all.jar \ -import-files --path /Users/rudin/workspace/new-tool/new-tool-cli/src/test/resources/mixed-files --preview 5 --previewDrop content +import-files --path /Users/rudin/workspace/new-tool/new-tool-cli/src/test/resources/mixed-files --preview 5 --preview-drop content ``` After spark-submit completes, you can refresh to see evidence of the completed application. @@ -253,7 +253,7 @@ to something you can access : $SPARK_HOME/bin/spark-submit --class com.marklogic.newtool.cli.Submit \ --packages org.apache.hadoop:hadoop-aws:3.3.6,org.apache.hadoop:hadoop-client:3.3.6 \ --master spark://NYWHYC3G0W:7077 new-tool-cli/build/libs/new-tool-cli-0.1-SNAPSHOT-all.jar \ -import-files --path "s3a://changeme/*.*" --preview 10 --previewDrop content +import-files --path "s3a://changeme/*.*" --preview 10 --preview-drop content ``` ### Testing with AWS EMR @@ -279,11 +279,11 @@ Once your cluster is created, you'll add a "Step" in order to run spark-submit: 3. For "Spark-submit options", enter `--class com.marklogic.newtool.cli.Submit`. 4. For "Arguments", enter the CLI command all the args you would normally enter when using the CLI. -If your CLI command will be accessing S3, you most likely should not include `--s3AddCredentials`. The EMR EC2 instance +If your CLI command will be accessing S3, you most likely should not include `--s3-add-credentials`. The EMR EC2 instance will already have access to the S3 buckets per the "EC2 instance profile" you configured while creating your cluster. Additionally, if your CLI command is accessing an S3 bucket in a region other than the one that EMR is running in, -you can add `--s3Endpoint s3.us-east-1.amazon.com` as an argument, replacing "us-east-1" with the region that the +you can add `--s3-endpoint s3.us-east-1.amazon.com` as an argument, replacing "us-east-1" with the region that the S3 buckets is in. After adding your step, it will run. It typically takes about 30s for the step to run, and it may take a minute or so diff --git a/docs/common-options.md b/docs/common-options.md index 173f28c2..f25518f7 100644 --- a/docs/common-options.md +++ b/docs/common-options.md @@ -22,9 +22,9 @@ Generally, you must include at least the following information for each command: - Authentication information. 
For the common use case of using digest or basic authentication with a MarkLogic app server, you can use the -`--connectionString` option to specify the host, port, username, and password in a single concise option: +`--connection-string` option to specify the host, port, username, and password in a single concise option: - --connectionString user:password@host:port + --connection-string user:password@host:port For other authentication mechanisms, you must use the `--host` and `--port` options to define the host and port for your MarkLogic app server. @@ -33,30 +33,30 @@ All available connection options are shown in the table below: | Option | Description | | --- | --- | -| --authType | Type of authentication to use. Possible values are `BASIC`, `DIGEST`, `CLOUD`, `KERBEROS`, `CERTIFICATE`, and `SAML`.| -| --basePath | Path to prepend to each call to a MarkLogic REST API app server. | -| --certificateFile | File path for a key store to be used for 'CERTIFICATE' authentication. | -| --certificatePassword | Password for the key store referenced by '--certificateFile'. | -| --connectionString | Defines a connection string as user:password@host:port; only usable when using `DIGEST` or `BASIC` authentication. | -| --cloudApiKey | API key for authenticating with a MarkLogic Cloud cluster. | -| --connectionType | Defines whether connections can be made directly to each host in the MarkLogic cluster. Possible values are `DIRECT` and `GATEWAY`. | +| --auth-type | Type of authentication to use. Possible values are `BASIC`, `DIGEST`, `CLOUD`, `KERBEROS`, `CERTIFICATE`, and `SAML`.| +| --base-path | Path to prepend to each call to a MarkLogic REST API app server. | +| --certificate-file | File path for a keystore to be used for 'CERTIFICATE' authentication. | +| --certificate-password | Password for the keystore referenced by '--certificate-file'. | +| --connection-string | Defines a connection string as user:password@host:port; only usable when using `DIGEST` or `BASIC` authentication. | +| --cloud-api-key | API key for authenticating with a MarkLogic Cloud cluster. | +| --connection-type | Defines whether connections can be made directly to each host in the MarkLogic cluster. Possible values are `DIRECT` and `GATEWAY`. | | --database | Name of a database to connect if it differs from the one associated with the app server identified by '--port'. | -| --disableGzippedResponses | If included, responses from MarkLogic will not be gzipped. May improve performance when responses are very small. +| --disable-gzipped-responses | If included, responses from MarkLogic will not be gzipped. May improve performance when responses are very small. | --host | The MarkLogic host to connect to. | -| --kerberosPrincipal | Principal to be used with `KERBEROS` authentication. | -| --keyStoreAlgorithm | Algorithm of the key store identified by '--keyStorePath'; defaults to `SunX509`. | -| --keyStorePassword | Password for the key store identified by '--keyStorePath'. | -| --keyStorePath | File path for a key store for two-way SSL connections. | -| --keyStoreType | Type of the key store identified by '--keyStorePath'; defaults to `JKS`. | +| --kerberos-principal | Principal to be used with `KERBEROS` authentication. | +| --keystore-algorithm | Algorithm of the keystore identified by '--keystore-path'; defaults to `SunX509`. | +| --keystore-password | Password for the keystore identified by '--keystore-path'. | +| --keystore-path | File path for a keystore for two-way SSL connections. 
| +| --keystore-type | Type of the keystore identified by '--keystore-path'; defaults to `JKS`. | | --password | Password when using `DIGEST` or `BASIC` authentication. | | --port | Port of the [REST API app server](https://docs.marklogic.com/guide/rest-dev) to connect to. | -| --samlToken | Token to be used with `SAML` authentication. | -| --sslHostnameVerifier | Hostname verification strategy when connecting via SSL. Possible values are `ANY`, `COMMON`, and `STRICT`. | -| --sslProtocol | SSL protocol to use when the MarkLogic app server requires an SSL connection. If a key store or trust store is configured, defaults to `TLSv1.2`. | -| --trustStoreAlgorithm | Algorithm of the trust store identified by '--trustStorePath'; defaults to `SunX509`. | -| --trustStorePassword | Password for the trust store identified by '--trustStorePath'. | -| --trustStorePath | File path for a trust store for establishing trust with the certificate used by the MarkLogic app server. | -| --trustStoreType | Type of the trust store identified by '--trustStorePath'; defaults to `JKS`. | +| --saml-token | Token to be used with `SAML` authentication. | +| --ssl-hostname-verifier | Hostname verification strategy when connecting via SSL. Possible values are `ANY`, `COMMON`, and `STRICT`. | +| --ssl-protocol | SSL protocol to use when the MarkLogic app server requires an SSL connection. If a keystore or truststore is configured, defaults to `TLSv1.2`. | +| --truststore-algorithm | Algorithm of the truststore identified by '--truststore-path'; defaults to `SunX509`. | +| --truststore-password | Password for the truststore identified by '--truststore-path'. | +| --truststore-path | File path for a truststore for establishing trust with the certificate used by the MarkLogic app server. | +| --truststore-type | Type of the truststore identified by '--truststore-path'; defaults to `JKS`. | | --username | Username when using `DIGEST` or `BASIC` authentication. | @@ -93,17 +93,17 @@ you can preview an import without writing any of the data to MarkLogic: For commands that read from a source other than MarkLogic, you are not required to specify any MarkLogic connection information when including `--preview` since no connection needs to be made to MarkLogic. -The number after `--preview` specifies how many records to show. You can use `--previewDrop` to specify potentially -verbose columns to drop from the preview. And you can use `--previewVertical` to see the results a vertical display +The number after `--preview` specifies how many records to show. You can use `--preview-drop` to specify potentially +verbose columns to drop from the preview. 
And you can use `--preview-vertical` to see the results a vertical display instead of in a table: ``` ./bin/nt import-parquet-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --path export/parquet \ --preview 10 \ - --previewDrop job_title,department - --previewVertical + --preview-drop job_title,department + --preview-vertical ``` Note that in the case of previewing an import, NT will show the data as it has been read, which consists of a set of @@ -120,10 +120,10 @@ The following shows an example of only importing the first 10 rows from a delimi ``` ./bin/nt import-delimited-files \ --path ../data/employees.csv.gz \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --permissions nt-role,read,nt-role,update \ --collections employee \ - --uriTemplate "/employee/{id}.json" \ + --uri-template "/employee/{id}.json" \ --limit 10 ``` diff --git a/docs/copy.md b/docs/copy.md index 9b0411e6..ee2aa7f9 100644 --- a/docs/copy.md +++ b/docs/copy.md @@ -22,19 +22,19 @@ The following options control which documents are read from MarkLogic: | Option | Description | | --- |--- | -| --stringQuery | A string query utilizing MarkLogic's search grammar. | +| --string-query | A string query utilizing MarkLogic's search grammar. | | --query | A structured, serialized CTS, or combined query expressed as JSON or XML. | | --options | Name of a REST API search options document; typically used with a string query. | | --collections | Comma-delimited sequence of collection names. | | --directory | A database directory for constraining on URIs. | -You must specify at least one of `--stringQuery`, `--query`, `--collections`, or `--directory`. You may specify any +You must specify at least one of `--string-query`, `--query`, `--collections`, or `--directory`. You may specify any combination of those options as well. The `copy` command then requires that you specify connection information via for the target database that the documents will be copied into. Each of the [connection options](common-options.md) can be used for this target database, but with `output` as a prefix so that they are distinguished from the connections used for the source database. For example, -`--outputConnectionString` is used to specify a connection string for the target database. If you are copying the documents +`--output-connection-string` is used to specify a connection string for the target database. If you are copying the documents to the same database that they were read from, you can omit output connection options. The following shows an example of copying documents from a collection to a different database in the same MarkLogic @@ -42,10 +42,10 @@ cluster: ``` ./bin/nt copy \ - --connectionString "user:password@localhost:8000" \ + --connection-string "user:password@localhost:8000" \ --collections "example" \ - --outputConnectionString "user:password@localhost:8000" \ - --outputDatabase "target-database" + --output-connection-string "user:password@localhost:8000" \ + --output-database "target-database" ``` ## Controlling what metadata is read @@ -62,5 +62,5 @@ following will only read documents and their collections and permissions: The `copy` command supports many of the same options as the [import commands](import/common-import-features.md) for writing documents. But similar to the output connection options, each option for controlling how documents are written -is prefixed with `output`. 
For example, to specify collections for the documents, `--outputCollections` is used instead +is prefixed with `output`. For example, to specify collections for the documents, `--output-collections` is used instead of `--collections`. diff --git a/docs/export/export-archives.md b/docs/export/export-archives.md index 98317dea..fb0cd571 100644 --- a/docs/export/export-archives.md +++ b/docs/export/export-archives.md @@ -25,13 +25,13 @@ The following options then control which documents are selected to be exported: | Option | Description | | --- |--- | -| --stringQuery | A string query utilizing MarkLogic's search grammar. | +| --string-query | A string query utilizing MarkLogic's search grammar. | | --query | A structured, serialized CTS, or combined query expressed as JSON or XML. | | --options | Name of a REST API search options document; typically used with a string query. | | --collections | Comma-delimited sequence of collection names. | | --directory | A database directory for constraining on URIs. | -You must specify at least one of `--stringQuery`, `--query`, `--collections`, or `--directory`. You may specify any +You must specify at least one of `--string-query`, `--query`, `--collections`, or `--directory`. You may specify any combination of those options as well. You must then use the `--path` option to specify a directory to write archive files to. @@ -58,5 +58,5 @@ to each document before it is written to an archive. A transform is configured v | Option | Description | | --- | --- | | --transform | Name of a MarkLogic REST transform to apply to the document before writing it. | -| --transformParams | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | -| --transformParamsDelimiter | Delimiter for `--transformParams`; typically set when a value contains a comma. | +| --transform-params | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | +| --transform-params-delimiter | Delimiter for `--transform-params`; typically set when a value contains a comma. | diff --git a/docs/export/export-documents.md b/docs/export/export-documents.md index 8205837a..58a2706f 100644 --- a/docs/export/export-documents.md +++ b/docs/export/export-documents.md @@ -19,19 +19,19 @@ The `export-files` command is used to select documents in a MarkLogic database a You must specify a `--path` option for where files should be written along with connection information for the MarkLogic database you wish to write to: - ./bin/nt export-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt export-files --path /path/to/files --connection-string "user:password@localhost:8000" The following options then control which documents are selected to be exported: | Option | Description | | --- |--- | -| --stringQuery | A string query utilizing MarkLogic's search grammar. | +| --string-query | A string query utilizing MarkLogic's search grammar. | | --query | A structured, serialized CTS, or combined query expressed as JSON or XML. | | --options | Name of a REST API search options document; typically used with a string query. | | --collections | Comma-delimited sequence of collection names. | | --directory | A database directory for constraining on URIs. | -You must specify at least one of `--stringQuery`, `--query`, `--collections`, or `--directory`. You may specify any +You must specify at least one of `--string-query`, `--query`, `--collections`, or `--directory`. 
You may specify any combination of those options as well. ## Transforming document content @@ -42,8 +42,8 @@ to each document before it is written to a file. A transform is configured via t | Option | Description | | --- | --- | | --transform | Name of a MarkLogic REST transform to apply to the document before writing it. | -| --transformParams | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | -| --transformParamsDelimiter | Delimiter for `--transformParams`; typically set when a value contains a comma. | +| --transform-params | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | +| --transform-params-delimiter | Delimiter for `--transform-params`; typically set when a value contains a comma. | ## Compressing content @@ -67,7 +67,7 @@ the following command below from the [Getting Started guide](getting-started.md) ``` rm export/*.zip -./bin/nt export-files --connectionString nt-user:password@localhost:8004 \ +./bin/nt export-files --connection-string nt-user:password@localhost:8004 \ --collections employee \ --path export --compression zip ``` @@ -76,15 +76,15 @@ The `./export` directory will have 12 zip files in it. This count is due to how which involves creating 4 partitions by default per forest in the MarkLogic database. The example application has 3 forests in its content database, and thus 12 partitions are created, resulting in 12 separate zip files. -You can use the `--partitionsPerForest` option to control how many partitions - and thus workers - read documents +You can use the `--partitions-per-forest` option to control how many partitions - and thus workers - read documents from each forest in your database: ``` rm export/*.zip -./bin/nt export-files --connectionString nt-user:password@localhost:8004 \ +./bin/nt export-files --connection-string nt-user:password@localhost:8004 \ --collections employee \ --path export --compression zip \ - --partitionsPerForest 1 + --partitions-per-forest 1 ``` This approach will produce 3 zip files - one per forest. @@ -94,7 +94,7 @@ writing data, regardless of how many were used to read the data: ``` rm export/*.zip -./bin/nt export-files --connectionString nt-user:password@localhost:8004 \ +./bin/nt export-files --connection-string nt-user:password@localhost:8004 \ --collections employee \ --path export --compression zip \ --repartition 1 diff --git a/docs/export/export-rdf.md b/docs/export/export-rdf.md index 6548b734..b41c4538 100644 --- a/docs/export/export-rdf.md +++ b/docs/export/export-rdf.md @@ -10,13 +10,13 @@ More to come, just an example for now: ``` ./bin/nt import-rdf-files \ --path ../1k.n3 \ - --connectionString "admin:admin@localhost:8000" \ + --connection-string "admin:admin@localhost:8000" \ --collections "my-triples" \ --permissions "rest-reader,read,rest-writer,update" ./bin/nt export-rdf-files \ --path export \ - --connectionString "admin:admin@localhost:8000" \ + --connection-string "admin:admin@localhost:8000" \ --graphs "my-triples" \ --format ttl ``` diff --git a/docs/export/export-rows.md b/docs/export/export-rows.md index 0201e4ae..c4165399 100644 --- a/docs/export/export-rows.md +++ b/docs/export/export-rows.md @@ -31,7 +31,7 @@ provides additional guidance on how to write an Optic query. You must also specify connection information for the MarkLogic database you wish to query. Please see the [guide on common options](../common-options.md) for instructions on doing so. 
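As a quick sketch of what such an export can look like (the connection string and the `Example/Employees` view are assumptions carried over from the Getting Started guide, and the option values are illustrative only):

```
./bin/nt export-parquet-files \
  --connection-string "nt-user:password@localhost:8004" \
  --query "op.fromView('Example', 'Employees', '')" \
  --path export/parquet \
  --batch-size 10000 \
  --partitions 4
```

The `--batch-size` and `--partitions` options used above are described next.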
-The `--batchSize` and `--partitions` query are used to tune performance by controlling how many rows are retrieved in +The `--batch-size` and `--partitions` query are used to tune performance by controlling how many rows are retrieved in a single call to MarkLogic and how many requests are made in parallel. It is recommended to first test your command without setting these options to see if the performance is acceptable. When you are ready to attempt to optimize the performance of your export command, please see the @@ -53,10 +53,10 @@ NT. The `export-jdbc` command requires that you specify connection details for the database you wish to write to via JDBC. Connection details are specified via the following options: -- `--jdbcUrl` is required and specifies the JDBC connection URL. -- `--jdbcDriver` is required specifies the main class name of the JDBC driver. -- `--jdbcUser` specifies an optional user to authenticate as (this may already be specified via `--jdbcUrl`). -- `--jdbcPassword` specifies an optional password to authenticate with (this may already be specified via `--jdbcUrl`). +- `--jdbc-url` is required and specifies the JDBC connection URL. +- `--jdbc-driver` is required specifies the main class name of the JDBC driver. +- `--jdbc-user` specifies an optional user to authenticate as (this may already be specified via `--jdbc-url`). +- `--jdbc-password` specifies an optional password to authenticate with (this may already be specified via `--jdbc-url`). ### Exporting to a table @@ -65,10 +65,10 @@ Once you have installed your database's JDBC driver and determined your JDBC con a notional example of doing so: ``` -./bin/nt export-jdbc --connectionString user:password@localhost:8000 \ +./bin/nt export-jdbc --connection-string user:password@localhost:8000 \ --query "op.fromView('example', 'employee', '')" \ - --jdbcUrl "jdbc:postgresql://localhost/example?user=postgres&password=postgres" \ - --jdbcDriver "org.postgresql.Driver" \ + --jdbc-url "jdbc:postgresql://localhost/example?user=postgres&password=postgres" \ + --jdbc-driver "org.postgresql.Driver" \ --table "marklogic-employee-data" ``` diff --git a/docs/export/specifying-path.md b/docs/export/specifying-path.md index a65f64ea..a0df4340 100644 --- a/docs/export/specifying-path.md +++ b/docs/export/specifying-path.md @@ -16,8 +16,8 @@ NT can export files to S3 via a path expression of the form `s3a://bucket-name/o In most cases, NT must use your AWS credentials to access an S3 bucket. NT uses the AWS SDK to fetch credentials from [locations supported by the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-authentication-short-term.html). -To enable this, include the `--s3AddCredentials` option: +To enable this, include the `--s3-add-credentials` option: ``` -./bin/nt export-files --path "s3a://my-bucket/some/path" --s3AddCredentials +./bin/nt export-files --path "s3a://my-bucket/some/path" --s3-add-credentials ``` diff --git a/docs/getting-started.md b/docs/getting-started.md index 767fc904..41207060 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -57,14 +57,14 @@ command. To see the usage for a particular command, such as `import-files`, run: ./bin/nt help import-files -Required options are marked with an asterisk - "*". Additionally, every command requires that either `--connectionString` +Required options are marked with an asterisk - "*". 
Additionally, every command requires that either `--connection-string` or `--host` and `--port` be specified so that the tool knows which MarkLogic cluster to connect to. -The `--connectionString` option provides a succinct way of defining the host, port, username, and password when the MarkLogic +The `--connection-string` option provides a succinct way of defining the host, port, username, and password when the MarkLogic app server you connect to requires basic or digest authentication. Its value is of the form `(user):(password)@(host):(port)`. For example: - ./bin/nt import-files --connectionString "my-user:my-secret@localhost:8000" ... + ./bin/nt import-files --connection-string "my-user:my-secret@localhost:8000" ... Options can also be read from a file; see the [Common Options](common-options.md) guide for more information. @@ -78,10 +78,10 @@ demonstrated: ``` ./bin/nt import-delimited-files \ --path ../data/employees.csv.gz \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --permissions nt-role,read,nt-role,update \ --collections employee \ - --uriTemplate "/employee/{id}.json" + --uri-template "/employee/{id}.json" ``` By accessing your [MarkLogic qconsole](https://docs.marklogic.com/guide/qconsole), you can see that the `employee` @@ -98,10 +98,10 @@ requires a separate Postgres database; it is only included for reference): ``` ./bin/nt import-jdbc \ - --jdbcUrl "jdbc:postgresql://localhost/dvdrental?user=postgres&password=postgres" \ - --jdbcDriver "org.postgresql.Driver" \ + --jdbc-url "jdbc:postgresql://localhost/dvdrental?user=postgres&password=postgres" \ + --jdbc-driver "org.postgresql.Driver" \ --query "select * from customer" \ - --connectionString "new-tool-user:password@localhost:8004" \ + --connection-string "new-tool-user:password@localhost:8004" \ --permissions nt-role,read,nt-role,update \ --collections customer ``` @@ -119,18 +119,18 @@ to select rows. The following shows an example of exporting the 1000 employee do ``` mkdir export ./bin/nt export-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --collections employee \ --path export \ --compression zip \ - --zipFileCount 1 + --zip-file-count 1 ``` The above command specifies a collection of documents to export. You can also use the `--query` option to specify a [structured query](https://docs.marklogic.com/guide/search-dev/structured-query), [serialized CTS query](https://docs.marklogic.com/guide/rest-dev/search#id_30577), or [complex query](https://docs.marklogic.com/guide/rest-dev/search#id_69918), either as JSON or XML. You can also use -`--stringQuery` to leverage MarkLogic's +`--string-query` to leverage MarkLogic's [search grammar](https://docs.marklogic.com/guide/search-dev/string-query) for selecting documents. The following command shows a collection, a string query, and a structured query used together, resulting @@ -138,12 +138,12 @@ in 4 JSON documents being written to `./export/employee`: ``` ./bin/nt export-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --collections employee \ - --stringQuery Engineering \ + --string-query Engineering \ --query '{"query": {"value-query": {"json-property": "job_title", "text": "Junior Executive"}}}' \ --path export \ - --prettyPrint + --pretty-print ``` See [the Export guide](export/export.md) for more information. 
@@ -151,7 +151,7 @@ See [the Export guide](export/export.md) for more information. ### Exporting to S3 NT allows for data to be exported to S3, with the same approach working for importing data as well. You can -reference an S3 bucket path via the `s3a://` prefix. The `--s3AddCredentials` option will then use the AWS SDK to access your +reference an S3 bucket path via the `s3a://` prefix. The `--s3-add-credentials` option will then use the AWS SDK to access your AWS credentials; please see the [AWS documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html) for information on how to configure your credentials. @@ -160,12 +160,12 @@ bucket, ensuring that your AWS credentials give you access to writing to the buc ``` ./bin/nt export-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --collections employee \ --compression zip \ - --zipFileCount 1 \ + --zip-file-count 1 \ --path s3a://bucket-name-changeme \ - --s3AddCredentials + --s3-add-credentials ``` ### Exporting rows @@ -176,7 +176,7 @@ destinations, such as Parquet files or an RDBMS. The following demonstrates writ ``` mkdir export/parquet ./bin/nt export-parquet-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --path export/parquet \ --query "op.fromView('Example', 'Employees', '')" ``` @@ -187,10 +187,10 @@ Change the details in it to match your database and JDBC driver, ensuring that t ``` ./bin/nt export-jdbc \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --query "op.fromView('Example', 'Employees', '')" \ - --jdbcUrl "jdbc:postgresql://localhost/postgres?user=postgres&password=postgres" \ - --jdbcDriver "org.postgresql.Driver" \ + --jdbc-url "jdbc:postgresql://localhost/postgres?user=postgres&password=postgres" \ + --jdbc-driver "org.postgresql.Driver" \ --table employees \ --mode overwrite ``` @@ -204,7 +204,7 @@ command can preview 10 rows read from MarkLogic without writing any data to file ``` ./bin/nt export-parquet-files \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --query "op.fromView('Example', 'Employees', '')" \ --path export/parquet \ --preview 10 @@ -220,9 +220,9 @@ documents: ``` ./bin/nt reprocess \ - --connectionString "nt-user:password@localhost:8004" \ - --readJavascript "cts.uris(null, null, cts.collectionQuery('employee'))" \ - --writeJavascript "declareUpdate(); xdmp.documentAddCollections(URI, 'reprocessed')" + --connection-string "nt-user:password@localhost:8004" \ + --read-javascript "cts.uris(null, null, cts.collectionQuery('employee'))" \ + --write-javascript "declareUpdate(); xdmp.documentAddCollections(URI, 'reprocessed')" ``` In qconsole, you can see that the 1000 employee documents are now also in the `reprocessed` collection. @@ -241,9 +241,9 @@ MarkLogic instance: ``` ./bin/nt copy \ - --connectionString "nt-user:password@localhost:8004" \ + --connection-string "nt-user:password@localhost:8004" \ --collections employee \ - --outputConnectionString "nt-user:password@localhost:8000" + --output-connection-string "nt-user:password@localhost:8000" ``` For more information, please see the [Copying guide](copy.md). 
diff --git a/docs/import/common-import-features.md b/docs/import/common-import-features.md index 4d31c67b..bb1c17c0 100644 --- a/docs/import/common-import-features.md +++ b/docs/import/common-import-features.md @@ -22,23 +22,23 @@ command reads. You can use the following command line options to control the URI | Option | Description | | --- | --- | -| --uriPrefix | A prefix to apply to each URI. | -| --uriSuffix | A suffix to apply to each URI. | -| --uriReplace | Comma-delimited list of regular expressions and replacement values, with each replacement value surrounded by single quotes. | -| --uriTemplate | Template for each URI containing one or more column names. | +| --uri-prefix | A prefix to apply to each URI. | +| --uri-suffix | A suffix to apply to each URI. | +| --uri-replace | Comma-delimited list of regular expressions and replacement values, with each replacement value surrounded by single quotes. | +| --uri-template | Template for each URI containing one or more column names. | ### Replacing URI contents -When importing data from files where the initial URI is based on an absolute file path, the `--uriReplace` option can +When importing data from files where the initial URI is based on an absolute file path, the `--uri-replace` option can be used to remove much of the file path from the URI, though this is not required. For example, if you import files from a path of `/path/to/my/data` and you only want to include `/data` in your URIs, you would include the following option: - --uriReplace ".*/data,'/data'" + --uri-replace ".*/data,'/data'" ### Configuring URIs via a template -The `--uriTemplate` option allows you to configure a URI based on a JSON representation of each row that a command +The `--uri-template` option allows you to configure a URI based on a JSON representation of each row that a command reads from its associated data source. This option is supported for the following commands: - `import-avro-files` @@ -60,15 +60,15 @@ to either a top-level field name in the JSON representation of a record, or it m For example, consider an employee data source where the JSON representation of each record from that data source has top-level fields of `id` and `last_name`. You could configure a URI for each document using the following option: - --uriTemplate "/employee/{id}/{last_name}.json" + --uri-template "/employee/{id}/{last_name}.json" -A JSON Pointer expression is useful in conjunction with the optional `--jsonRootName` option for defining a root field +A JSON Pointer expression is useful in conjunction with the optional `--json-root-name` option for defining a root field name in each JSON document. For example, using the above example, you may want each employee document to have a single root field of "employee" so that each document is more self-describing. The URI template will be evaluated against a JSON document with this root field applied, so you would need to use JSON Pointer expressions to refer to the `id` and `last_name` values: - --jsonRootName employee --uriTemplate "/employee/{/employee/id}/{/employee/last_name}.json" + --json-root-name employee --uri-template "/employee/{/employee/id}/{/employee/last_name}.json" ## Configuring document metadata @@ -86,14 +86,14 @@ Each of the above types of metadata can be configured via the following options: | --- | --- | | --collections | Comma-delimited list of collection names to add to each document. | | --permissions | Comma-delimited list of MarkLogic role names and capabilities - e.g. 
`rest-reader,read,rest-writer,update`. | -| --temporalCollection | Name of a MarkLogic temporal collection to assign to each document. | +| --temporal-collection | Name of a MarkLogic temporal collection to assign to each document. | The following shows an example of each option: ``` --collections employees,imported-data \ --permissions my-reader-role,read,my-writer-role,update \ ---temporalCollection my-temporal-data +--temporal-collection my-temporal-data ``` ## Transforming content @@ -104,5 +104,5 @@ to each document before it is written. A transform is configured via the followi | Option | Description | | --- | --- | | --transform | Name of a MarkLogic REST transform to apply to the document before writing it. | -| --transformParams | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | -| --transformParamsDelimiter | Delimiter for `--transformParams`; typically set when a value contains a comma. | +| --transform-params | Comma-delimited list of transform parameter names and values - e.g. param1,value1,param2,value2. | +| --transform-params-delimiter | Delimiter for `--transform-params`; typically set when a value contains a comma. | diff --git a/docs/import/import-files/aggregate-xml.md b/docs/import/import-files/aggregate-xml.md index d0593673..65070c05 100644 --- a/docs/import/import-files/aggregate-xml.md +++ b/docs/import/import-files/aggregate-xml.md @@ -24,23 +24,23 @@ will be used as the root of an XML document written to MarkLogic. The `--namespa has an associated namespace: ``` -./bin/nt import-aggregate-xml-files --path /path/to/files --connectionString user:password@localhost:8000 \ +./bin/nt import-aggregate-xml-files --path /path/to/files --connection-string user:password@localhost:8000 \ --element employee --namespace org:example ``` ## Controlling document URIs In addition to the options for controlling URIs described in the [common import features guide](../common-import-features.md), -you can use the `--uriElement` and `--uriNamespace` options to identify an element in each XML document whose value should +you can use the `--uri-element` and `--uri-namespace` options to identify an element in each XML document whose value should be included in the URI: ``` -./bin/nt import-aggregate-xml-files --path /path/to/files --connectionString user:password@localhost:8000 \ +./bin/nt import-aggregate-xml-files --path /path/to/files --connection-string user:password@localhost:8000 \ --element employee --namespace org:example \ - --uriElement employee ID --namespace org:example + --uri-element employee ID --namespace org:example ``` -You may still wish to use options like `--uriPrefix` and `--uriSuffix` to make the URI more self-describing. +You may still wish to use options like `--uri-prefix` and `--uri-suffix` to make the URI more self-describing. ## Compressed XML files diff --git a/docs/import/import-files/archives.md b/docs/import/import-files/archives.md index c1c01c76..937b4e4b 100644 --- a/docs/import/import-files/archives.md +++ b/docs/import/import-files/archives.md @@ -23,7 +23,7 @@ The `import-archive-files` command will import the documents and metadata files `export-archive-files` command. 
You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-archive-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-archive-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Importing MLCP archives You can also import archives that were produced via the `EXPORT` command in MLCP. The `import-mlcp-archive-files` command is used instead, and it also requires at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-mlcp-archive-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-mlcp-archive-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Restricting metadata diff --git a/docs/import/import-files/avro.md b/docs/import/import-files/avro.md index d36fc239..26bcd59b 100644 --- a/docs/import/import-files/avro.md +++ b/docs/import/import-files/avro.md @@ -20,19 +20,19 @@ The `import-avro-files` command is used to read Avro files and write the content documents in MarkLogic. You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-avro-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-avro-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Specifying a JSON root name By default, each column found in an Avro file will become a top-level field in the JSON document written to MarkLogic. It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It -can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--jsonRootName` option with +can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--json-root-name` option with a value for the name of the root field. The data read from a row will then be nested under this root field. ## Creating XML documents -To create XML documents for the rows in an Avro file instead of JSON documents, include the `--xmlRootName` -option to specify the name of the root element in each XML document. You can optionally include `--xmlNamespace` to +To create XML documents for the rows in an Avro file instead of JSON documents, include the `--xml-root-name` +option to specify the name of the root element in each XML document. You can optionally include `--xml-namespace` to specify a namespace for the root element that will then be inherited by every child element as well. ## Advanced options diff --git a/docs/import/import-files/delimited-text.md b/docs/import/import-files/delimited-text.md index 0a311e3b..7438c6ac 100644 --- a/docs/import/import-files/delimited-text.md +++ b/docs/import/import-files/delimited-text.md @@ -20,19 +20,19 @@ The `import-delimited-files` command is used to read delimited text files. The c the delimiter for each row value.
You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-delimited-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-delimited-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Specifying a JSON root name By default, each column found in a delimited text file will become a top-level field in the JSON document written to MarkLogic. It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It -can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--jsonRootName` option with +can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--json-root-name` option with a value for the name of the root field. The data read from a row will then be nested under this root field. ## Creating XML documents -To create XML documents for the rows in a delimited text file instead of JSON documents, include the `--xmlRootName` -option to specify the name of the root element in each XML document. You can optionally include `--xmlNamespace` to +To create XML documents for the rows in a delimited text file instead of JSON documents, include the `--xml-root-name` +option to specify the name of the root element in each XML document. You can optionally include `--xml-namespace` to specify a namespace for the root element that will then be inherited by every child element as well. ## Advanced options diff --git a/docs/import/import-files/handling-errors.md b/docs/import/import-files/handling-errors.md index cf1e3b82..8ab4116a 100644 --- a/docs/import/import-files/handling-errors.md +++ b/docs/import/import-files/handling-errors.md @@ -10,10 +10,10 @@ If NT fails to write a batch of documents to MarkLogic, it will attempt to write batches until it concludes it cannot write a particular document. An error will be logged for that document and processing will continue. -To force NT to throw an error when it fails to write a batch of documents, include the `--abortOnWriteFailure` option. +To force NT to throw an error when it fails to write a batch of documents, include the `--abort-on-write-failure` option. -When NT is not using the `--abortOnWriteFailure` option, you can capture failed documents with their metadata in a -ZIP archive file. To enable this, include the `--failedDocumentsPath` option with a file path for where you want +When NT is not using the `--abort-on-write-failure` option, you can capture failed documents with their metadata in a +ZIP archive file. To enable this, include the `--failed-documents-path` option with a file path for where you want archive files written containing failed documents. You can later use the `import-archive-files` command to retry these failed documents, presumably after making a fix to either the data or your application that will allow the documents to be successfully imported. 
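A minimal sketch of how these error-handling options combine (the paths, connection string, and roles here are hypothetical):

```
# Capture any documents that fail to be written, along with their metadata.
./bin/nt import-files --path /path/to/files \
  --connection-string "user:password@localhost:8000" \
  --permissions my-reader-role,read,my-writer-role,update \
  --failed-documents-path /tmp/failed-documents

# After addressing the underlying problem, retry the captured documents.
./bin/nt import-archive-files --path /tmp/failed-documents \
  --connection-string "user:password@localhost:8000"
```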
diff --git a/docs/import/import-files/json.md b/docs/import/import-files/json.md index 187a4de9..cb5a8f77 100644 --- a/docs/import/import-files/json.md +++ b/docs/import/import-files/json.md @@ -24,18 +24,18 @@ this behavior for an array of JSON objects, use the `import-files` command inste You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-json-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-json-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Importing JSON Lines files -If your files conform to the JSON Lines format, include the `--jsonLines` option with no value. NT will then attempt +If your files conform to the JSON Lines format, include the `--json-lines` option with no value. NT will then attempt to read each line as a separate JSON object and write it to MarkLogic as a document. ## Specifying a JSON root name It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It can help with indexing purposes in MarkLogic as well. To include a JSON root field in the JSON documents written by -`import-json-files, use the `--jsonRootName` option with a value for the name of the root field. The data read from a +`import-json-files`, use the `--json-root-name` option with a value for the name of the root field. The data read from a row will then be nested under this root field. ## Advanced options The `import-json-files` command reuses Spark's support for reading JSON files. You can include any of the [Spark JSON options](https://spark.apache.org/docs/latest/sql-data-sources-json.html) via the `-P` dynamic option to control how JSON content is read. Dynamic options are expressed as `-PoptionName=optionValue`. To support the more common use case of reading files containing JSON objects and arrays, the command defaults to setting -the `multiLine` option to `true`. The `--jsonLines` option is thus effectively a shortcut for changing the `multiLine` +the `multiLine` option to `true`. The `--json-lines` option is thus effectively a shortcut for changing the `multiLine` option to have a value of `false`. diff --git a/docs/import/import-files/orc.md b/docs/import/import-files/orc.md index 332bce86..25b23fb3 100644 --- a/docs/import/import-files/orc.md +++ b/docs/import/import-files/orc.md @@ -20,19 +20,19 @@ The `import-orc-files` command is used to read ORC files and write the contents documents in MarkLogic. You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-orc-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-orc-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Specifying a JSON root name By default, each column found in an ORC file will become a top-level field in the JSON document written to MarkLogic. It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It -can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--jsonRootName` option with +can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--json-root-name` option with a value for the name of the root field. The data read from a row will then be nested under this root field.
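For instance, a hypothetical ORC import that nests each row under an `employee` root field (the path and connection string are placeholders, and the `id` column is assumed to exist in the ORC data):

```
./bin/nt import-orc-files --path /path/to/files \
  --connection-string "user:password@localhost:8000" \
  --json-root-name employee \
  --uri-template "/employee/{/employee/id}.json"
```

Note how the URI template uses a JSON Pointer expression, since the `id` column is nested under the `employee` root field.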
## Creating XML documents -To create XML documents for the rows in an ORC file instead of JSON documents, include the `--xmlRootName` -option to specify the name of the root element in each XML document. You can optionally include `--xmlNamespace` to +To create XML documents for the rows in an ORC file instead of JSON documents, include the `--xml-root-name` +option to specify the name of the root element in each XML document. You can optionally include `--xml-namespace` to specify a namespace for the root element that will then be inherited by every child element as well. ## Advanced options diff --git a/docs/import/import-files/parquet.md b/docs/import/import-files/parquet.md index d7c3f856..5506ebde 100644 --- a/docs/import/import-files/parquet.md +++ b/docs/import/import-files/parquet.md @@ -20,19 +20,19 @@ The `import-parquet-files` command is used to read Parquet files and write the c documents in MarkLogic. You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-parquet-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-parquet-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Specifying a JSON root name By default, each column found in a Parquet file will become a top-level field in the JSON document written to MarkLogic. It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It -can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--jsonRootName` option with +can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--json-root-name` option with a value for the name of the root field. The data read from a row will then be nested under this root field. ## Creating XML documents -To create XML documents for the rows in a Parquet file instead of JSON documents, include the `--xmlRootName` -option to specify the name of the root element in each XML document. You can optionally include `--xmlNamespace` to +To create XML documents for the rows in a Parquet file instead of JSON documents, include the `--xml-root-name` +option to specify the name of the root element in each XML document. You can optionally include `--xml-namespace` to specify a namespace for the root element that will then be inherited by every child element as well. ## Advanced options diff --git a/docs/import/import-files/rdf.md b/docs/import/import-files/rdf.md index 0c814a55..e98086d8 100644 --- a/docs/import/import-files/rdf.md +++ b/docs/import/import-files/rdf.md @@ -24,7 +24,7 @@ document is an XML document containing up to 100 semantic triples. To import RDF files, you must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to: - ./bin/nt import-rdf-files --path /path/to/files --connectionString "user:password@localhost:8000" + ./bin/nt import-rdf-files --path /path/to/files --connection-string "user:password@localhost:8000" ## Supported file types @@ -48,11 +48,11 @@ specified within the quad. To specify a different graph for every triple (which will not apply to quads), use the `--graph` option. To specify a graph for both triples and quads - thus overriding the graph associated with each quad - use the -`--graphOverride` option. +`--graph-override` option.
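As an illustrative sketch (the file path, connection string, and graph name are hypothetical), the following would assign every imported triple to a single graph:

```
./bin/nt import-rdf-files --path /path/to/triples.ttl \
  --connection-string "user:password@localhost:8000" \
  --permissions rest-reader,read,rest-writer,update \
  --graph "http://example.org/my-graph"
```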
Note that the set of collections specified via the `--collections` option does not have any impact on the graph. You are free to specify as many collections as you want in addition to the graph you choose via `--graph` or -`--graphOverride`. +`--graph-override`. ## Compressed files diff --git a/docs/import/import-files/regular-files.md b/docs/import/import-files/regular-files.md index dc96ab64..9d227cba 100644 --- a/docs/import/import-files/regular-files.md +++ b/docs/import/import-files/regular-files.md @@ -22,20 +22,20 @@ The `import-files` command is used to import a set of files into MarkLogic, with document. You must specify at least one `--path` option along with connection information for the MarkLogic database you wish to write to. For example: - ./bin/nt import-files --path /path/to/files --connectionString user:password@localhost:8000 + ./bin/nt import-files --path /path/to/files --connection-string user:password@localhost:8000 ## Controlling document URIs Each document will have an initial URI based on the absolute path of the associated file. See [common import features](../common-import-features.md) for details on adjusting this URI. In particular, the -`--uriReplace` option is often useful for removing most of the absolute path to produce a concise, self-describing +`--uri-replace` option is often useful for removing most of the absolute path to produce a concise, self-describing URI. ## Specifying a document type The type of each document written to MarkLogic is determined by the file extension found in the URI along with the set of [MIME types configured in MarkLogic](https://docs.marklogic.com/admin-help/mimetype). For unrecognized file -extensions, or URIs that do not have a file extension, you can force a document type via the `--documentType` option. +extensions, or URIs that do not have a file extension, you can force a document type via the `--document-type` option. The value of this option must be one of `JSON`, `XML`, or `TEXT`. ## Importing Gzip files @@ -48,5 +48,5 @@ with a value of `GZIP`. You can also import Gzip files as-is - i.e. without deco To import each entry in a ZIP file as a separate document, include the `--compression` option with a value of `ZIP`. Each document will have an initial URI based on both the absolute path of the ZIP file and the name of the ZIP entry. -You can also use the `--documentType` option as described above to force a document type for any entry that has a file +You can also use the `--document-type` option as described above to force a document type for any entry that has a file extension not recognized by MarkLogic. diff --git a/docs/import/import-files/selecting-files.md b/docs/import/import-files/selecting-files.md index ddd653af..c37f7760 100644 --- a/docs/import/import-files/selecting-files.md +++ b/docs/import/import-files/selecting-files.md @@ -32,10 +32,10 @@ NT can read files from S3 via a path expression of the form `s3a://bucket-name/o In most cases, NT must use your AWS credentials to access an S3 bucket. NT uses the AWS SDK to fetch credentials from [locations supported by the AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-authentication-short-term.html). 
-To enable this, include the `--s3AddCredentials` option:
+To enable this, include the `--s3-add-credentials` option:
 
 ```
-./bin/nt import-files --path "s3a://my-bucket/some/path" --s3AddCredentials
+./bin/nt import-files --path "s3a://my-bucket/some/path" --s3-add-credentials
 ```
 
 ## Ignoring child directories
 
 By default, child directories of each directory specified by `--path` are included. To prevent this, include the
 following option:
 
-    --recursiveFileLookup false
+    --recursive-file-lookup false
 
 ## Filtering files
 
diff --git a/docs/import/import-jdbc.md b/docs/import/import-jdbc.md
index 1a1925e8..5e45be26 100644
--- a/docs/import/import-jdbc.md
+++ b/docs/import/import-jdbc.md
@@ -26,10 +26,10 @@ NT.
 The `import-jdbc` command requires that you specify connection details for the database you wish to read from via
 JDBC. Connection details are specified via the following options:
 
-- `--jdbcUrl` is required and specifies the JDBC connection URL.
-- `--jdbcDriver` is required specifies the main class name of the JDBC driver.
-- `--jdbcUser` specifies an optional user to authenticate as (this may already be specified via `--jdbcUrl`).
-- `--jdbcPassword` specifies an optional password to authenticate with (this may already be specified via `--jdbcUrl`).
+- `--jdbc-url` is required and specifies the JDBC connection URL.
+- `--jdbc-driver` is required and specifies the main class name of the JDBC driver.
+- `--jdbc-user` specifies an optional user to authenticate as (this may already be specified via `--jdbc-url`).
+- `--jdbc-password` specifies an optional password to authenticate with (this may already be specified via `--jdbc-url`).
 
 ## Importing data
 
@@ -44,13 +44,13 @@ The SQL query can contain any syntax supported by your database.
 
 By default, each column in a row will become a top-level field in the JSON document written to
 MarkLogic. It is often useful to have a single "root" field in a JSON document so that it is more self-describing. It
-can help with indexing purposes in MarkLogic as well. To include a JSON root field, use the `--jsonRootName` option with
+can help with indexing in MarkLogic as well. To include a JSON root field, use the `--json-root-name` option with
 a value for the name of the root field. The data read from a row will then be nested under this root field.
 
 ## Creating XML documents
 
-To create an XML document for each row instead of a JSON document, include the `--xmlRootName`
-option to specify the name of the root element in each XML document. You can optionally include `--xmlNamespace` to
+To create an XML document for each row instead of a JSON document, include the `--xml-root-name`
+option to specify the name of the root element in each XML document. You can optionally include `--xml-namespace` to
 specify a namespace for the root element that will then be inherited by every child element as well.
 
 ## Aggregating rows
 
 structures that better represent complex entities. To facilitate producing hierarchical documents with multiple sets
 of related data, the following options can be used to combine multiple rows from a SQL query (which typically will
 include one or more joins) into hierarchical documents:
 
-- `--groupBy` specifies a column name to group rows by; this is typically the column used in a join.
+- `--group-by` specifies a column name to group rows by; this is typically the column used in a join.
- `--aggregate` specifies a string of the form `new_column_name=column1;column2;column3`. The `new_column_name` column
 will contain an array of objects, with each object having columns of `column1`, `column2`, and `column3`.
 
@@ -74,7 +74,7 @@ following options would be used to achieve that (connection details are omitted
 
 ```
 ./bin/nt import-jdbc \
     --query "select c.*, p.payment_id, p.amount, p.payment_date from customer c inner join payment p on c.customer_id = p.customer_id" \
-    --groupBy customer_id \
+    --group-by customer_id \
     --aggregate "payments=payment_id;amount;payment_date"
 ```
 
diff --git a/docs/import/tuning-performance.md b/docs/import/tuning-performance.md
index 376bc036..4b2f6576 100644
--- a/docs/import/tuning-performance.md
+++ b/docs/import/tuning-performance.md
@@ -9,13 +9,13 @@ When writing to MarkLogic, the two main settings within NT that affect performan
 cluster itself is configured, such as index settings and number of hosts) are the batch size - the number of documents
 sent in a request to MarkLogic - and the number of threads used to send requests to MarkLogic.
 
-Batch size is configured via the `--batchSize` option, which defaults to a value of 200. Depending on the size of
+Batch size is configured via the `--batch-size` option, which defaults to a value of 200. Depending on the size of
 your documents, you may find improved performance by raising this value significantly for smaller documents, such as
 500 or even 1000.
 
 For the number of threads used to send requests to MarkLogic, two factors come into play. The product of the
-number of partitions and the value of the `--threadCount` option determines how many total threads will be used to send
-requests. For example, if the import command uses 4 partitions to read data and `--threadCount` is set to 4 (its
+number of partitions and the value of the `--thread-count` option determines how many total threads will be used to send
+requests. For example, if the import command uses 4 partitions to read data and `--thread-count` is set to 4 (its
 default value), 16 total threads will send requests to MarkLogic. The number of partitions is determined by how data is
 read and differs across the various import commands.
 
@@ -35,10 +35,10 @@ by placing a load balancer in front of MarkLogic or by configuring direct connec
 
 The rule of thumb can thus be expressed as:
 
-    Number of partitions * Value of --threadCount <= Number of hosts * number of app server threads
+    Number of partitions * Value of --thread-count <= Number of hosts * number of app server threads
 
 ### Direct connections to each host
 
 In a scenario where NT can connect directly to each host in your MarkLogic cluster without a load balancer being
-present, you can set the `--connectionType` option to a value of `direct`. NT will then effectively act as a load
+present, you can set the `--connection-type` option to a value of `direct`. NT will then effectively act as a load
 balancer by distributing work across each host in the cluster.
 
diff --git a/docs/reprocess.md b/docs/reprocess.md
index 182fd8a0..8042d4aa 100644
--- a/docs/reprocess.md
+++ b/docs/reprocess.md
@@ -28,19 +28,19 @@ processing you wish.
 
 For the reader, you must specify one of the following options:
 
-- `--readJavascript` = JavaScript code for reading data.
-- `--readJavascriptFile` = path to file containing JavaScript code for reading data.
-- `--readXquery` = XQuery code for reading data.
-- `--readXqueryFile` = path to file containing XQuery code for reading data.
-- `--readInvoke` = path of a MarkLogic server module to invoke for reading data.
+- `--read-javascript` = JavaScript code for reading data.
+- `--read-javascript-file` = path to file containing JavaScript code for reading data.
+- `--read-xquery` = XQuery code for reading data.
+- `--read-xquery-file` = path to file containing XQuery code for reading data.
+- `--read-invoke` = path of a MarkLogic server module to invoke for reading data.
 
 For the writer, you must specify one of the following options:
 
-- `--writeJavascript` = JavaScript code for writing data.
-- `--writeJavascriptFile` = path to file containing JavaScript code for writing data.
-- `--writeXquery` = XQuery code for writing data.
-- `--writeXqueryFile` = path to file containing XQuery code for writing data.
-- `--writeInvoke` = path of a MarkLogic server module to invoke for writing data.
+- `--write-javascript` = JavaScript code for writing data.
+- `--write-javascript-file` = path to file containing JavaScript code for writing data.
+- `--write-xquery` = XQuery code for writing data.
+- `--write-xquery-file` = path to file containing XQuery code for writing data.
+- `--write-invoke` = path of a MarkLogic server module to invoke for writing data.
 
 You must also specify [connection information](common-options.md) for the MarkLogic database containing the data
 you wish to reprocess.
 
@@ -48,9 +48,9 @@ you wish to reprocess.
 The following shows a simple example of querying a collection for its URIs and logging each one:
 ```
-./bin/nt reprocess --connectionString user:password@localhost:8000 \
-  --readJavascript "cts.uris(null, null, cts.collectionQuery('example'))"
-  --writeJavascript "var URI; console.log(URI)"
+./bin/nt reprocess --connection-string user:password@localhost:8000 \
+  --read-javascript "cts.uris(null, null, cts.collectionQuery('example'))" \
+  --write-javascript "var URI; console.log(URI)"
 ```
 
 ## Configuring variables
 
@@ -58,18 +58,18 @@ The following shows a simple example of querying a collection for its URIs and l
 You can define variables in your custom code, regardless of how you define that code. Variables allow for code to
 be reused with different inputs that are defined as command line options.
 
-For variables in your reader, you can include the `--readVar` option multiple times with each value being of the
-form `variableName=variableValue`. Likewise for the writer, you can include the `--writeVar` option multiple times
+For variables in your reader, you can include the `--read-var` option multiple times with each value being of the
+form `variableName=variableValue`. Likewise for the writer, you can include the `--write-var` option multiple times
 with each value being of the same form.
 
The following shows a simple example of including a variable in both the reader and writer:
 
 ```
-./bin/nt reprocess --connectionString user:password@localhost:8000 \
-  --readJavascript "var collection; cts.uris(null, null, cts.collectionQuery(collection))"
-  --readVar "collection=example"
-  --writeJavascript "var URI; var exampleVariable; console.log([URI, exampleVariable])"
-  --writeVar "exampleVariable=testValue"
+./bin/nt reprocess --connection-string user:password@localhost:8000 \
+  --read-javascript "var collection; cts.uris(null, null, cts.collectionQuery(collection))" \
+  --read-var "collection=example" \
+  --write-javascript "var URI; var exampleVariable; console.log([URI, exampleVariable])" \
+  --write-var "exampleVariable=testValue"
 ```
 
 ## Defining reader partitions
 
@@ -78,11 +78,11 @@ NT will send a single request to MarkLogic to execute your reader code. If your
 and is at risk of timing out, or if you seek better performance by breaking your query into many smaller queries,
 you can use one of the following options to define partitions for your reader:
 
-- `--readPartitionsJavascript` = JavaScript code that returns partitions.
-- `--readPartitionsJavascriptFile` = path to file containing JavaScript code that returns partitions.
-- `--readPartitionsXquery` = XQuery code that returns partitions.
-- `--readPartitionsXqueryFile` = path to file containing XQuery code that returns partitions.
-- `--readPartitionsInvoke` = path of a MarkLogic server module to invoke for returning partitions.
+- `--read-partitions-javascript` = JavaScript code that returns partitions.
+- `--read-partitions-javascript-file` = path to file containing JavaScript code that returns partitions.
+- `--read-partitions-xquery` = XQuery code that returns partitions.
+- `--read-partitions-xquery-file` = path to file containing XQuery code that returns partitions.
+- `--read-partitions-invoke` = path of a MarkLogic server module to invoke for returning partitions.
 
 For each partition returned, the reader code will be invoked with a variable named `PARTITION` containing the value
 of the partition. Your reader code is then free to use that value however you wish.
 
 Partition values can thus be anything you want. A common use case is to partition based on the forests in the
 MarkLogic database. The following shows an example of partitions based on forests:
 
 ```
-./bin/nt reprocess --connectionString user:password@localhost:8000 \
-  --readPartitionsJavascript "xdmp.databaseForests(xdmp.database())"
-  --readJavascript "cts.uris(null, null, cts.collectionQuery('example'), 0, [PARTITION])"
-  --writeJavascript "var URI; console.log(URI)"
+./bin/nt reprocess --connection-string user:password@localhost:8000 \
+  --read-partitions-javascript "xdmp.databaseForests(xdmp.database())" \
+  --read-javascript "cts.uris(null, null, cts.collectionQuery('example'), 0, [PARTITION])" \
+  --write-javascript "var URI; console.log(URI)"
 ```
 
-In the above example, the code defined by `--readJavascript` will be invoked once for each forest ID returned by the code
-defined by `--readPartitionsJavascript`. The value of `PARTITION` - a forest ID - is then passed to the
+In the above example, the code defined by `--read-javascript` will be invoked once for each forest ID returned by the code
+defined by `--read-partitions-javascript`. The value of `PARTITION` - a forest ID - is then passed to the
 [cts.uris](https://docs.marklogic.com/cts.uris) function to constrain it to a particular forest.
With this approach, the query is broken up into N queries that run in parallel, with N equaling the number of forests
in the database.
 
diff --git a/mlcp-testing/build.gradle b/mlcp-testing/build.gradle
index c8255e70..2cecab18 100644
--- a/mlcp-testing/build.gradle
+++ b/mlcp-testing/build.gradle
@@ -70,37 +70,37 @@ task ntImportRdf(type: Exec) {
     description = "Intended to run against a local Caddy load balancer."
     workingDir = "../nt/"
     commandLine "./bin/nt", "import-rdf-files",
-        "--connectionString", "${mlUsername}:${mlPassword}@${lbHost}:${lbPort}",
+        "--connection-string", "${mlUsername}:${mlPassword}@${lbHost}:${lbPort}",
         "--path", rdfFile,
         "--permissions", "rest-reader,read,rest-writer,update",
-        "--connectionType", "gateway",
-        "--threadCount", "64",
-        "--batchSize", "100"
+        "--connection-type", "gateway",
+        "--thread-count", "64",
+        "--batch-size", "100"
 }
 
 task ntDirectImportRdf(type: Exec) {
     description = "For testing without a load balancer."
     workingDir = "../nt/"
     commandLine "./bin/nt", "import-rdf-files",
-        "--connectionString", "${mlUsername}:${mlPassword}@${mlHost}:${mlRestPort}",
+        "--connection-string", "${mlUsername}:${mlPassword}@${mlHost}:${mlRestPort}",
         "--path", rdfFile,
         "--permissions", "rest-reader,read,rest-writer,update",
-        "--connectionType", "direct",
-        "--threadCount", "64",
-        "--batchSize", "100"
+        "--connection-type", "direct",
+        "--thread-count", "64",
+        "--batch-size", "100"
 }
 
 task ntCopy(type: Exec) {
     workingDir = "../nt/"
     commandLine "./bin/nt", "copy",
-        "--connectionString", "${mlUsername}:${mlPassword}@${mlHost}:${mlRestPort}",
+        "--connection-string", "${mlUsername}:${mlPassword}@${mlHost}:${mlRestPort}",
         "--collections", "address_small",
-        "--batchSize", "500",
+        "--batch-size", "500",
         // results in 24 threads total, assuming 3 forests and 4 partitions per forest, which means 12 partitions.
-        "--outputThreadCount", "2",
-        "--outputBatchSize", "500",
-        "--outputPermissions", "rest-reader,read,rest-writer,update",
-        "--outputConnectionString", "${outputUsername}:${outputPassword}@${outputHost}:${outputPort}"
+        "--output-thread-count", "2",
+        "--output-batch-size", "500",
+        "--output-permissions", "rest-reader,read,rest-writer,update",
+        "--output-connection-string", "${outputUsername}:${outputPassword}@${outputHost}:${outputPort}"
 }
 
 task mlcpCopy(type: JavaExec) {
 
@@ -145,12 +145,12 @@ task ntTwoWaySSL(type: Exec) {
         "then run this task to ensure that two-way SSL works."
workingDir = "../nt/" commandLine "./bin/nt", "copy", - "--connectionString", "rest-writer:x@localhost:8012", + "--connection-string", "rest-writer:x@localhost:8012", "--collections", "zipcode", - "--keyStorePath", "../mlcp-testing/keyStore.jks", - "--keyStorePassword", "password", - "--trustStorePath", "../mlcp-testing/trustStore.jks", - "--trustStorePassword", "password", - "--sslHostnameVerifier", "any", + "--keystore-path", "../mlcp-testing/keyStore.jks", + "--keystore-password", "password", + "--truststore-path", "../mlcp-testing/trustStore.jks", + "--truststore-password", "password", + "--ssl-hostname-verifier", "any", "--preview", "10" } diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/CommonParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/CommonParams.java index 17e7029d..bcd1004f 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/CommonParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/CommonParams.java @@ -22,10 +22,10 @@ public class CommonParams { @Parameter(names = "--preview", description = "Show up to the first N rows of data read by the command.") private Integer preview; - @Parameter(names = "--previewDrop", description = "Specify one or more columns to drop when using --preview.", variableArity = true) + @Parameter(names = "--preview-drop", description = "Specify one or more columns to drop when using --preview.", variableArity = true) private List previewColumnsToDrop = new ArrayList<>(); - @Parameter(names = "--previewVertical", description = "Preview the data in a vertical format instead of in a table.") + @Parameter(names = "--preview-vertical", description = "Preview the data in a vertical format instead of in a table.") private Boolean previewVertical; @Parameter(names = "--repartition", description = "Specify the number of partitions / workers to be used for writing data.") diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionInputs.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionInputs.java index cfb33e85..b9eb0da2 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionInputs.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionInputs.java @@ -53,7 +53,7 @@ public void validate(String name, String value) throws ParameterException { public String getSelectedHost() { if (connectionString != null) { - return new ConnectionString(connectionString, "--connectionString").getHost(); + return new ConnectionString(connectionString, "--connection-string").getHost(); } return host; } diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParams.java index 849e5b42..44a1ce63 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParams.java @@ -12,7 +12,7 @@ public class ConnectionParams extends ConnectionInputs implements ConnectionOpti @Override @Parameter( - names = {"--connectionString"}, + names = {"--connection-string"}, description = "Defines a connection string as user:password@host:port; only usable when using 'DIGEST' or 'BASIC' authentication.", validateWith = ConnectionStringValidator.class ) @@ -43,7 +43,7 @@ public ConnectionOptions port(int port) { @Override @Parameter( - names = "--basePath", + names = "--base-path", description = "Path to prepend to each call to a MarkLogic REST API app server." 
) public ConnectionOptions basePath(String basePath) { @@ -62,7 +62,7 @@ public ConnectionOptions database(String database) { } @Parameter( - names = "--connectionType", + names = "--connection-type", description = "Defines whether connections can be made directly to each host in the MarkLogic cluster." ) public ConnectionOptions connectionType(DatabaseClient.ConnectionType connectionType) { @@ -77,7 +77,7 @@ public ConnectionOptions connectionType(String connectionType) { @Override @Parameter( - names = "--disableGzippedResponses", + names = "--disable-gzipped-responses", description = "If included, responses from MarkLogic will not be gzipped. May improve performance when responses are very small." ) public ConnectionOptions disableGzippedResponses(Boolean disableGzippedResponses) { @@ -87,7 +87,7 @@ public ConnectionOptions disableGzippedResponses(Boolean disableGzippedResponses @Override @Parameter( - names = "--authType", + names = "--auth-type", description = "Type of authentication to use." ) public ConnectionOptions authenticationType(AuthenticationType authType) { @@ -118,8 +118,8 @@ public ConnectionOptions password(String password) { @Override @Parameter( - names = "--certificateFile", - description = "File path for a key store to be used for 'CERTIFICATE' authentication." + names = "--certificate-file", + description = "File path for a keystore to be used for 'CERTIFICATE' authentication." ) public ConnectionOptions certificateFile(String certificateFile) { this.certificateFile = certificateFile; @@ -128,8 +128,8 @@ public ConnectionOptions certificateFile(String certificateFile) { @Override @Parameter( - names = "--certificatePassword", - description = "Password for the key store referenced by '--certificateFile'." + names = "--certificate-password", + description = "Password for the keystore referenced by '--certificate-file'." ) public ConnectionOptions certificatePassword(String certificatePassword) { this.certificatePassword = certificatePassword; @@ -138,7 +138,7 @@ public ConnectionOptions certificatePassword(String certificatePassword) { @Override @Parameter( - names = "--cloudApiKey", + names = "--cloud-api-key", description = "API key for authenticating with a MarkLogic Cloud cluster." ) public ConnectionOptions cloudApiKey(String cloudApiKey) { @@ -148,7 +148,7 @@ public ConnectionOptions cloudApiKey(String cloudApiKey) { @Override @Parameter( - names = "--kerberosPrincipal", + names = "--kerberos-principal", description = "Principal to be used with 'KERBEROS' authentication." ) public ConnectionOptions kerberosPrincipal(String kerberosPrincipal) { @@ -158,7 +158,7 @@ public ConnectionOptions kerberosPrincipal(String kerberosPrincipal) { @Override @Parameter( - names = "--samlToken", + names = "--saml-token", description = "Token to be used with 'SAML' authentication." ) public ConnectionOptions samlToken(String samlToken) { @@ -168,9 +168,9 @@ public ConnectionOptions samlToken(String samlToken) { @Override @Parameter( - names = "--sslProtocol", - description = "SSL protocol to use when the MarkLogic app server requires an SSL connection. If a key store " + - "or trust store is configured, defaults to 'TLSv1.2'." + names = "--ssl-protocol", + description = "SSL protocol to use when the MarkLogic app server requires an SSL connection. If a keystore " + + "or truststore is configured, defaults to 'TLSv1.2'." 
) public ConnectionOptions sslProtocol(String sslProtocol) { this.sslProtocol = sslProtocol; @@ -179,7 +179,7 @@ public ConnectionOptions sslProtocol(String sslProtocol) { @Override @Parameter( - names = "--sslHostnameVerifier", + names = "--ssl-hostname-verifier", description = "Hostname verification strategy when connecting via SSL." ) public ConnectionOptions sslHostnameVerifier(SslHostnameVerifier sslHostnameVerifier) { @@ -189,8 +189,8 @@ public ConnectionOptions sslHostnameVerifier(SslHostnameVerifier sslHostnameVeri @Override @Parameter( - names = "--keyStorePath", - description = "File path for a key store for two-way SSL connections." + names = "--keystore-path", + description = "File path for a keystore for two-way SSL connections." ) public ConnectionOptions keyStorePath(String keyStorePath) { this.keyStorePath = keyStorePath; @@ -199,8 +199,8 @@ public ConnectionOptions keyStorePath(String keyStorePath) { @Override @Parameter( - names = "--keyStorePassword", - description = "Password for the key store identified by '--keyStorePath'." + names = "--keystore-password", + description = "Password for the keystore identified by '--keystore-path'." ) public ConnectionOptions keyStorePassword(String keyStorePassword) { this.keyStorePassword = keyStorePassword; @@ -209,8 +209,8 @@ public ConnectionOptions keyStorePassword(String keyStorePassword) { @Override @Parameter( - names = "--keyStoreType", - description = "Type of the key store identified by '--keyStorePath'; defaults to 'JKS'." + names = "--keystore-type", + description = "Type of the keystore identified by '--keystore-path'; defaults to 'JKS'." ) public ConnectionOptions keyStoreType(String keyStoreType) { this.keyStoreType = keyStoreType; @@ -219,8 +219,8 @@ public ConnectionOptions keyStoreType(String keyStoreType) { @Override @Parameter( - names = "--keyStoreAlgorithm", - description = "Algorithm of the key store identified by '--keyStorePath'; defaults to 'SunX509'." + names = "--keystore-algorithm", + description = "Algorithm of the keystore identified by '--keystore-path'; defaults to 'SunX509'." ) public ConnectionOptions keyStoreAlgorithm(String keyStoreAlgorithm) { this.keyStoreAlgorithm = keyStoreAlgorithm; @@ -229,8 +229,8 @@ public ConnectionOptions keyStoreAlgorithm(String keyStoreAlgorithm) { @Override @Parameter( - names = "--trustStorePath", - description = "File path for a trust store for establishing trust with the certificate used by the MarkLogic app server." + names = "--truststore-path", + description = "File path for a truststore for establishing trust with the certificate used by the MarkLogic app server." ) public ConnectionOptions trustStorePath(String trustStorePath) { this.trustStorePath = trustStorePath; @@ -239,8 +239,8 @@ public ConnectionOptions trustStorePath(String trustStorePath) { @Override @Parameter( - names = "--trustStorePassword", - description = "Password for the trust store identified by '--trustStorePath'." + names = "--truststore-password", + description = "Password for the truststore identified by '--truststore-path'." ) public ConnectionOptions trustStorePassword(String trustStorePassword) { this.trustStorePassword = trustStorePassword; @@ -249,8 +249,8 @@ public ConnectionOptions trustStorePassword(String trustStorePassword) { @Override @Parameter( - names = "--trustStoreType", - description = "Type of the trust store identified by '--trustStorePath'; defaults to 'JKS'." 
+        names = "--truststore-type",
+        description = "Type of the truststore identified by '--truststore-path'; defaults to 'JKS'."
     )
     public ConnectionOptions trustStoreType(String trustStoreType) {
         this.trustStoreType = trustStoreType;
@@ -259,8 +259,8 @@ public ConnectionOptions trustStoreType(String trustStoreType) {
 
     @Override
     @Parameter(
-        names = "--trustStoreAlgorithm",
-        description = "Algorithm of the trust store identified by '--trustStorePath'; defaults to 'SunX509'."
+        names = "--truststore-algorithm",
+        description = "Algorithm of the truststore identified by '--truststore-path'; defaults to 'SunX509'."
     )
     public ConnectionOptions trustStoreAlgorithm(String trustStoreAlgorithm) {
         this.trustStoreAlgorithm = trustStoreAlgorithm;
diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParamsValidator.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParamsValidator.java
index d3524f59..611189e3 100644
--- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParamsValidator.java
+++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/ConnectionParamsValidator.java
@@ -53,12 +53,12 @@ private static class ParamNames {
         final String password;
 
         ParamNames(boolean isOutput) {
-            connectionString = isOutput ? "--outputConnectionString" : "--connectionString";
-            host = isOutput ? "--outputHost" : "--host";
-            port = isOutput ? "--outputPort" : "--port";
-            authType = isOutput ? "--outputAuthType" : "--authType";
-            username = isOutput ? "--outputUsername" : "--username";
-            password = isOutput ? "--outputPassword" : "--password";
+            connectionString = isOutput ? "--output-connection-string" : "--connection-string";
+            host = isOutput ? "--output-host" : "--host";
+            port = isOutput ? "--output-port" : "--port";
+            authType = isOutput ? "--output-auth-type" : "--auth-type";
+            username = isOutput ? "--output-username" : "--username";
+            password = isOutput ? "--output-password" : "--password";
         }
     }
 }
diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/JdbcParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/JdbcParams.java
index 07111110..f8ab055e 100644
--- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/JdbcParams.java
+++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/JdbcParams.java
@@ -9,16 +9,16 @@
 public class JdbcParams implements JdbcOptions {
 
-    @Parameter(names = "--jdbcUrl", required = true, description = "The JDBC URL to connect to.")
+    @Parameter(names = "--jdbc-url", required = true, description = "The JDBC URL to connect to.")
     private String url;
 
-    @Parameter(names = "--jdbcDriver", description = "The class name of the JDBC driver to use.")
+    @Parameter(names = "--jdbc-driver", description = "The class name of the JDBC driver to use.")
     private String driver;
 
-    @Parameter(names = "--jdbcUser", description = "The user to authenticate as, if not specified in the JDBC URL.")
+    @Parameter(names = "--jdbc-user", description = "The user to authenticate as, if not specified in the JDBC URL.")
     private String user;
 
-    @Parameter(names = "--jdbcPassword", description = "The password to user for authentication, if not specified in the JDBC URL.")
+    @Parameter(names = "--jdbc-password", description = "The password to use for authentication, if not specified in the JDBC URL.")
     private String password;
 
     @DynamicParameter(
diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/S3Params.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/S3Params.java
index da65ae4f..5d67f992 100644
--- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/S3Params.java
+++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/S3Params.java
@@ -11,13 +11,13 @@
 public class S3Params {
 
     @Parameter(
-        names = "--s3AddCredentials",
+        names = "--s3-add-credentials",
         description = "Add credentials retrieved via the AWS SDK to the Spark context for use when accessing S3."
     )
     private boolean addCredentials;
 
     @Parameter(
-        names = "--s3Endpoint",
+        names = "--s3-endpoint",
         description = "Define the S3 endpoint for any operations involving S3; typically used when a " +
             "process like AWS EMR must access an S3 bucket in a separate region."
     )
diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/CopyCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/CopyCommand.java
index c994ab7f..a246e7f6 100644
--- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/CopyCommand.java
+++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/CopyCommand.java
@@ -49,92 +49,92 @@ public DocumentCopier.CopyReadDocumentsOptions categories(String... categories)
 
     public static class CopyWriteDocumentsParams implements WriteDocumentsOptions {
 
         @Parameter(
-            names = "--outputAbortOnWriteFailure",
+            names = "--output-abort-on-write-failure",
             description = "Include this option to cause the command to fail when a batch of documents cannot be written to MarkLogic."
         )
         private Boolean abortOnWriteFailure;
 
         @Parameter(
-            names = "--outputBatchSize",
+            names = "--output-batch-size",
             description = "The number of documents written in a call to MarkLogic."
         )
         private Integer batchSize = 100;
 
         @Parameter(
-            names = "--outputCollections",
+            names = "--output-collections",
             description = "Comma-delimited string of collection names to add to each document."
         )
         private String collections;
 
         @Parameter(
-            names = "--outputFailedDocumentsPath",
+            names = "--output-failed-documents-path",
             description = "File path for writing an archive file containing failed documents and their metadata."
         )
         private String failedDocumentsPath;
 
         @Parameter(
-            names = "--outputPermissions",
+            names = "--output-permissions",
             description = "Comma-delimited string of role names and capabilities to add to each document - e.g. role1,read,role2,update,role3,execute."
         )
         private String permissions;
 
         @Parameter(
-            names = "--outputTemporalCollection",
+            names = "--output-temporal-collection",
             description = "Name of a temporal collection to assign to each document."
         )
         private String temporalCollection;
 
         @Parameter(
-            names = "--outputThreadCount",
+            names = "--output-thread-count",
             description = "The number of threads used by each partition worker when writing batches of documents to MarkLogic."
         )
         private Integer threadCount = 4;
 
         @Parameter(
-            names = "--outputTotalThreadCount",
+            names = "--output-total-thread-count",
             description = "The total number of threads used across all partitions when writing batches of documents to MarkLogic."
         )
         private Integer totalThreadCount;
 
         @Parameter(
-            names = "--outputTransform",
+            names = "--output-transform",
             description = "Name of a MarkLogic REST API transform to apply to each document."
         )
         private String transform;
 
         @Parameter(
-            names = "--outputTransformParams",
+            names = "--output-transform-params",
             description = "Comma-delimited string of REST API transform parameter names and values - e.g. param1,value1,param2,value2."
         )
         private String transformParams;
 
         @Parameter(
-            names = "--outputTransformParamsDelimiter",
-            description = "Delimiter to use instead of a comma for the '--transformParams' parameter."
+            names = "--output-transform-params-delimiter",
+            description = "Delimiter to use instead of a comma for the '--output-transform-params' parameter."
         )
         private String transformParamsDelimiter;
 
         @Parameter(
-            names = "--outputUriPrefix",
+            names = "--output-uri-prefix",
             description = "String to prepend to each document URI."
         )
         private String uriPrefix;
 
         @Parameter(
-            names = "--outputUriReplace",
+            names = "--output-uri-replace",
             description = "Modify the URI for a document via a comma-delimited list of regular expression " +
                 "and replacement string pairs - e.g. regex,'value',regex,'value'. Each replacement string must be enclosed by single quotes."
         )
         private String uriReplace;
 
         @Parameter(
-            names = "--outputUriSuffix",
+            names = "--output-uri-suffix",
             description = "String to append to each document URI."
         )
         private String uriSuffix;
 
         @Parameter(
-            names = "--outputUriTemplate",
+            names = "--output-uri-template",
             description = "String defining a template for constructing each document URI. " +
                 "See https://marklogic.github.io/marklogic-spark-connector/writing.html for more information."
         )
diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/OutputConnectionParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/OutputConnectionParams.java
index 39c9c271..d2b5cf9d 100644
--- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/OutputConnectionParams.java
+++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/copy/OutputConnectionParams.java
@@ -48,7 +48,7 @@ private boolean atLeastOutputConnectionParameterExists(Map param
 
     @Override
     @Parameter(
-        names = {"--outputConnectionString"},
+        names = {"--output-connection-string"},
         description = "Defines a connection string as user:password@host:port; only usable when using 'DIGEST' or 'BASIC' authentication.",
         validateWith = ConnectionStringValidator.class
     )
@@ -59,7 +59,7 @@ public ConnectionOptions connectionString(String connectionString) {
 
     @Override
     @Parameter(
-        names = {"--outputHost"},
+        names = {"--output-host"},
         description = "The MarkLogic host to connect to."
     )
     public ConnectionOptions host(String host) {
@@ -69,7 +69,7 @@ public ConnectionOptions host(String host) {
 
     @Override
     @Parameter(
-        names = "--outputPort",
+        names = "--output-port",
         description = "Port of a MarkLogic REST API app server to connect to."
     )
     public ConnectionOptions port(int port) {
@@ -79,7 +79,7 @@ public ConnectionOptions port(int port) {
 
     @Override
     @Parameter(
-        names = "--outputBasePath",
+        names = "--output-base-path",
         description = "Path to prepend to each call to a MarkLogic REST API app server."
     )
     public ConnectionOptions basePath(String basePath) {
@@ -89,7 +89,7 @@ public ConnectionOptions basePath(String basePath) {
 
     @Override
     @Parameter(
-        names = "--outputDatabase",
+        names = "--output-database",
         description = "Name of a database to connect to if it differs from the one associated with the app server identified by 'port'."
     )
     public ConnectionOptions database(String database) {
@@ -98,7 +98,7 @@ public ConnectionOptions database(String database) {
     }
 
     @Parameter(
-        names = "--outputConnectionType",
+        names = "--output-connection-type",
         description = "Defines whether connections can be made directly to each host in the MarkLogic cluster."
    )
    public ConnectionOptions connectionType(DatabaseClient.ConnectionType connectionType) {
@@ -120,7 +120,7 @@ public ConnectionOptions disableGzippedResponses(Boolean value) {
 
     @Override
     @Parameter(
-        names = "--outputAuthType",
+        names = "--output-auth-type",
         description = "Type of authentication to use."
     )
     public ConnectionOptions authenticationType(AuthenticationType authType) {
@@ -130,7 +130,7 @@ public ConnectionOptions authenticationType(AuthenticationType authType) {
 
     @Override
     @Parameter(
-        names = "--outputUsername",
+        names = "--output-username",
         description = "Username when using 'DIGEST' or 'BASIC' authentication."
     )
     public ConnectionOptions username(String username) {
@@ -140,7 +140,7 @@ public ConnectionOptions username(String username) {
 
     @Override
     @Parameter(
-        names = "--outputPassword",
+        names = "--output-password",
         description = "Password when using 'DIGEST' or 'BASIC' authentication.",
         password = true
     )
@@ -151,8 +151,8 @@ public ConnectionOptions password(String password) {
 
     @Override
     @Parameter(
-        names = "--outputCertificateFile",
-        description = "File path for a key store to be used for 'CERTIFICATE' authentication."
+        names = "--output-certificate-file",
+        description = "File path for a keystore to be used for 'CERTIFICATE' authentication."
     )
     public ConnectionOptions certificateFile(String certificateFile) {
         this.certificateFile = certificateFile;
@@ -161,8 +161,8 @@ public ConnectionOptions certificateFile(String certificateFile) {
 
     @Override
     @Parameter(
-        names = "--outputCertificatePassword",
-        description = "Password for the key store referenced by '--certificateFile'."
+        names = "--output-certificate-password",
+        description = "Password for the keystore referenced by '--output-certificate-file'."
     )
     public ConnectionOptions certificatePassword(String certificatePassword) {
         this.certificatePassword = certificatePassword;
@@ -171,7 +171,7 @@ public ConnectionOptions certificatePassword(String certificatePassword) {
 
     @Override
     @Parameter(
-        names = "--outputCloudApiKey",
+        names = "--output-cloud-api-key",
         description = "API key for authenticating with a MarkLogic Cloud cluster."
     )
     public ConnectionOptions cloudApiKey(String cloudApiKey) {
@@ -181,7 +181,7 @@ public ConnectionOptions cloudApiKey(String cloudApiKey) {
 
     @Override
     @Parameter(
-        names = "--outputKerberosPrincipal",
+        names = "--output-kerberos-principal",
         description = "Principal to be used with 'KERBEROS' authentication."
     )
     public ConnectionOptions kerberosPrincipal(String kerberosPrincipal) {
@@ -191,7 +191,7 @@ public ConnectionOptions kerberosPrincipal(String kerberosPrincipal) {
 
     @Override
     @Parameter(
-        names = "--outputSamlToken",
+        names = "--output-saml-token",
         description = "Token to be used with 'SAML' authentication."
     )
     public ConnectionOptions samlToken(String samlToken) {
@@ -201,9 +201,9 @@ public ConnectionOptions samlToken(String samlToken) {
 
     @Override
     @Parameter(
-        names = "--outputSslProtocol",
-        description = "SSL protocol to use when the MarkLogic app server requires an SSL connection. If a key store " +
-            "or trust store is configured, defaults to 'TLSv1.2'."
+        names = "--output-ssl-protocol",
+        description = "SSL protocol to use when the MarkLogic app server requires an SSL connection. If a keystore " +
+            "or truststore is configured, defaults to 'TLSv1.2'."
     )
     public ConnectionOptions sslProtocol(String sslProtocol) {
         this.sslProtocol = sslProtocol;
@@ -212,7 +212,7 @@ public ConnectionOptions sslProtocol(String sslProtocol) {
 
     @Override
     @Parameter(
-        names = "--outputSslHostnameVerifier",
+        names = "--output-ssl-hostname-verifier",
         description = "Hostname verification strategy when connecting via SSL."
     )
     public ConnectionOptions sslHostnameVerifier(SslHostnameVerifier sslHostnameVerifier) {
@@ -222,8 +222,8 @@ public ConnectionOptions sslHostnameVerifier(SslHostnameVerifier sslHostnameVeri
 
     @Override
     @Parameter(
-        names = "--outputKeyStorePath",
-        description = "File path for a key store for two-way SSL connections."
+        names = "--output-keystore-path",
+        description = "File path for a keystore for two-way SSL connections."
     )
     public ConnectionOptions keyStorePath(String keyStorePath) {
         this.keyStorePath = keyStorePath;
@@ -232,8 +232,8 @@ public ConnectionOptions keyStorePath(String keyStorePath) {
 
     @Override
     @Parameter(
-        names = "--outputKeyStorePassword",
-        description = "Password for the key store identified by '--keyStorePath'."
+        names = "--output-keystore-password",
+        description = "Password for the keystore identified by '--output-keystore-path'."
     )
     public ConnectionOptions keyStorePassword(String keyStorePassword) {
         this.keyStorePassword = keyStorePassword;
@@ -242,8 +242,8 @@ public ConnectionOptions keyStorePassword(String keyStorePassword) {
 
     @Override
     @Parameter(
-        names = "--outputKeyStoreType",
-        description = "Type of the key store identified by '--keyStorePath'; defaults to 'JKS'."
+        names = "--output-keystore-type",
+        description = "Type of the keystore identified by '--output-keystore-path'; defaults to 'JKS'."
     )
     public ConnectionOptions keyStoreType(String keyStoreType) {
         this.keyStoreType = keyStoreType;
@@ -252,8 +252,8 @@ public ConnectionOptions keyStoreType(String keyStoreType) {
 
     @Override
     @Parameter(
-        names = "--outputKeyStoreAlgorithm",
-        description = "Algorithm of the key store identified by '--keyStorePath'; defaults to 'SunX509'."
+        names = "--output-keystore-algorithm",
+        description = "Algorithm of the keystore identified by '--output-keystore-path'; defaults to 'SunX509'."
     )
     public ConnectionOptions keyStoreAlgorithm(String keyStoreAlgorithm) {
         this.keyStoreAlgorithm = keyStoreAlgorithm;
@@ -262,8 +262,8 @@ public ConnectionOptions keyStoreAlgorithm(String keyStoreAlgorithm) {
 
     @Override
     @Parameter(
-        names = "--outputTrustStorePath",
-        description = "File path for a trust store for establishing trust with the certificate used by the MarkLogic app server."
+        names = "--output-truststore-path",
+        description = "File path for a truststore for establishing trust with the certificate used by the MarkLogic app server."
     )
     public ConnectionOptions trustStorePath(String trustStorePath) {
         this.trustStorePath = trustStorePath;
@@ -272,8 +272,8 @@ public ConnectionOptions trustStorePath(String trustStorePath) {
 
     @Override
     @Parameter(
-        names = "--outputTrustStorePassword",
-        description = "Password for the trust store identified by '--trustStorePath'."
+        names = "--output-truststore-password",
+        description = "Password for the truststore identified by '--output-truststore-path'."
     )
     public ConnectionOptions trustStorePassword(String trustStorePassword) {
         this.trustStorePassword = trustStorePassword;
@@ -282,8 +282,8 @@ public ConnectionOptions trustStorePassword(String trustStorePassword) {
 
     @Override
     @Parameter(
-        names = "--outputTrustStoreType",
-        description = "Type of the trust store identified by '--trustStorePath'; defaults to 'JKS'."
+        names = "--output-truststore-type",
+        description = "Type of the truststore identified by '--output-truststore-path'; defaults to 'JKS'."
     )
     public ConnectionOptions trustStoreType(String trustStoreType) {
         this.trustStoreType = trustStoreType;
@@ -292,8 +292,8 @@ public ConnectionOptions trustStoreType(String trustStoreType) {
 
     @Override
     @Parameter(
-        names = "--outputTrustStoreAlgorithm",
-        description = "Algorithm of the trust store identified by '--trustStorePath'; defaults to 'SunX509'."
+        names = "--output-truststore-algorithm",
+        description = "Algorithm of the truststore identified by '--output-truststore-path'; defaults to 'SunX509'."
) public ConnectionOptions trustStoreAlgorithm(String trustStoreAlgorithm) { this.trustStoreAlgorithm = trustStoreAlgorithm; diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ExportFilesCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ExportFilesCommand.java index b027a083..4fb50c9d 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ExportFilesCommand.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ExportFilesCommand.java @@ -64,10 +64,10 @@ public static class WriteGenericFilesParams implements Supplier imp public static class Validator extends AtLeastOneValidator { public Validator() { - super("--graphs", "--query", "--uris", "--stringQuery", "--collections", "--directory"); + super("--graphs", "--query", "--uris", "--string-query", "--collections", "--directory"); } } @@ -58,10 +58,10 @@ protected void applyWriter(SparkSession session, DataFrameWriter writer) { public static class ReadTriplesParams implements Supplier>, RdfFilesExporter.ReadTriplesDocumentsOptions { @Parameter(names = "--uris", description = "Newline-delimited sequence of document URIs to retrieve. Can be combined " + - "with --collections, --directory, and --stringQuery. If specified, --query will be ignored.") + "with --collections, --directory, and --string-query. If specified, --query will be ignored.") private String uris; - @Parameter(names = "--stringQuery", description = "A query utilizing the MarkLogic search grammar; " + + @Parameter(names = "--string-query", description = "A query utilizing the MarkLogic search grammar; " + "see https://docs.marklogic.com/guide/search-dev/string-query for more information.") private String stringQuery; @@ -81,10 +81,10 @@ public static class ReadTriplesParams implements Supplier>, @Parameter(names = "--options", description = "Name of a set of MarkLogic REST API search options.") private String options; - @Parameter(names = "--batchSize", description = "Number of documents to retrieve in each call to MarkLogic.") + @Parameter(names = "--batch-size", description = "Number of documents to retrieve in each call to MarkLogic.") private Integer batchSize = 100; - @Parameter(names = "--partitionsPerForest", description = "Number of partition readers to create for each forest.") + @Parameter(names = "--partitions-per-forest", description = "Number of partition readers to create for each forest.") private Integer partitionsPerForest = 4; @Override @@ -162,7 +162,7 @@ public static class WriteRdfFilesParams extends WriteFilesParams implements ReadD public static class Validator extends AtLeastOneValidator { public Validator() { - super("--query", "--uris", "--stringQuery", "--collections", "--directory"); + super("--query", "--uris", "--string-query", "--collections", "--directory"); } } - @Parameter(names = "--stringQuery", description = "A query utilizing the MarkLogic search grammar; " + + @Parameter(names = "--string-query", description = "A query utilizing the MarkLogic search grammar; " + "see https://docs.marklogic.com/guide/search-dev/string-query for more information.") private String stringQuery; @Parameter(names = "--uris", description = "Newline-delimited sequence of document URIs to retrieve. Can be combined " + - "with --collections, --directory, and --stringQuery. If specified, --query will be ignored.") + "with --collections, --directory, and --string-query. 
If specified, --query will be ignored.") private String uris; @Parameter(names = "--query", description = "A JSON or XML representation of a structured query, serialized CTS query, or combined query. " + @@ -47,16 +47,16 @@ public Validator() { @Parameter(names = "--transform", description = "Name of a MarkLogic REST API transform to apply to each matching document.") private String transform; - @Parameter(names = "--transformParams", description = "Comma-delimited sequence of transform parameter names and values - e.g. param1,value1,param2,value2.") + @Parameter(names = "--transform-params", description = "Comma-delimited sequence of transform parameter names and values - e.g. param1,value1,param2,value2.") private String transformParams; - @Parameter(names = "--transformParamsDelimiter", description = "Delimiter for transform parameters; defaults to a comma.") + @Parameter(names = "--transform-params-delimiter", description = "Delimiter for transform parameters; defaults to a comma.") private String transformParamsDelimiter; - @Parameter(names = "--batchSize", description = "Number of documents to retrieve in each call to MarkLogic.") + @Parameter(names = "--batch-size", description = "Number of documents to retrieve in each call to MarkLogic.") private Integer batchSize = 500; - @Parameter(names = "--partitionsPerForest", description = "Number of partition readers to create for each forest.") + @Parameter(names = "--partitions-per-forest", description = "Number of partition readers to create for each forest.") private Integer partitionsPerForest = 4; public Map makeOptions() { diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ReadRowsParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ReadRowsParams.java index 8314813e..43f830c5 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ReadRowsParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/ReadRowsParams.java @@ -15,13 +15,13 @@ public class ReadRowsParams implements ReadRowsOptions { @Parameter(names = "--query", description = "The Optic DSL query for retrieving rows; must use op.fromView as an accessor.") private String query; - @Parameter(names = "--batchSize", description = "Approximate number of rows to retrieve in each call to MarkLogic; defaults to 100000.") + @Parameter(names = "--batch-size", description = "Approximate number of rows to retrieve in each call to MarkLogic; defaults to 100000.") private Integer batchSize; // Not yet showing this in usage as it is confusing for a typical user to understand and would only need to be // set if push down aggregation is producing incorrect results. See the MarkLogic Spark connector documentation // for more information. - @Parameter(names = "--disableAggregationPushDown", hidden = true) + @Parameter(names = "--disable-aggregation-push-down", hidden = true) private Boolean disableAggregationPushDown; @Parameter(names = "--partitions", description = "Number of partitions to create when reading rows from MarkLogic. 
" + diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/WriteFilesParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/WriteFilesParams.java index b99861e9..b89e9366 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/WriteFilesParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/export/WriteFilesParams.java @@ -17,7 +17,7 @@ public abstract class WriteFilesParams implements S @ParametersDelegate private S3Params s3Params = new S3Params(); - @Parameter(names = "--fileCount", description = "Specifies how many files should be written; also an alias for '--repartition'.") + @Parameter(names = "--file-count", description = "Specifies how many files should be written; also an alias for '--repartition'.") protected Integer fileCount; public String getPath() { diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlCommand.java index 9ea41ea7..3e192b04 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlCommand.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlCommand.java @@ -51,12 +51,12 @@ public static class ReadXmlFilesParams extends ReadFilesParams aggregationExpressions = new ArrayList<>(); diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesCommand.java index be662538..6609e30d 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesCommand.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesCommand.java @@ -40,7 +40,7 @@ protected WriteDocumentParams getWriteParams() { public static class ReadJsonFilesParams extends ReadFilesParams implements ReadJsonFilesOptions { @Parameter( - names = "--jsonLines", + names = "--json-lines", description = "Specifies that the file contains one JSON object per line, per the JSON Lines format defined at https://jsonlines.org/ ." 
) private Boolean jsonLines; diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesCommand.java index 216c6906..253a9e4d 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesCommand.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesCommand.java @@ -73,7 +73,7 @@ public static class WriteTriplesDocumentsParams extends WriteDocumentParams implements ReadFilesOpt @Parameter(required = true, names = "--path", description = "Specify one or more path expressions for selecting files to import.") private List paths = new ArrayList<>(); - @Parameter(names = "--abortOnReadFailure", description = "Causes the command to abort when it fails to read a file.") + @Parameter(names = "--abort-on-read-failure", description = "Causes the command to abort when it fails to read a file.") private Boolean abortOnReadFailure = false; @Parameter(names = "--filter", description = "A glob filter for selecting only files with file names matching the pattern.") private String filter; - @Parameter(names = "--recursiveFileLookup", arity = 1, description = "If true, files will be loaded recursively from child directories and partition inferring is disabled.") + @Parameter(names = "--recursive-file-lookup", arity = 1, description = "If true, files will be loaded recursively from child directories and partition inferring is disabled.") private Boolean recursiveFileLookup = true; @ParametersDelegate diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteDocumentParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteDocumentParams.java index 321c28f7..5e8bec84 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteDocumentParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteDocumentParams.java @@ -17,13 +17,13 @@ public class WriteDocumentParams implements WriteDocumentsOptions, Supplier> { @Parameter( - names = "--abortOnWriteFailure", + names = "--abort-on-write-failure", description = "Include this option to cause the command to fail when a batch of documents cannot be written to MarkLogic." ) private Boolean abortOnWriteFailure; @Parameter( - names = "--batchSize", + names = "--batch-size", description = "The number of documents written in a call to MarkLogic." ) private Integer batchSize = 200; @@ -35,7 +35,7 @@ public class WriteDocumentParams implements Wri private String collections; @Parameter( - names = "--failedDocumentsPath", + names = "--failed-documents-path", description = "File path for writing an archive file containing failed documents and their metadata." ) private String failedDocumentsPath; @@ -47,19 +47,19 @@ public class WriteDocumentParams implements Wri private String permissions; @Parameter( - names = "--temporalCollection", + names = "--temporal-collection", description = "Name of a temporal collection to assign to each document." ) private String temporalCollection; @Parameter( - names = "--threadCount", + names = "--thread-count", description = "The number of threads used by each partition worker when writing batches of documents to MarkLogic." ) private Integer threadCount = 4; @Parameter( - names = "--totalThreadCount", + names = "--total-thread-count", description = "The total number of threads used across all partitions when writing batches of documents to MarkLogic." 
) private Integer totalThreadCount; @@ -71,38 +71,38 @@ public class WriteDocumentParams implements Wri private String transform; @Parameter( - names = "--transformParams", + names = "--transform-params", description = "Comma-delimited string of REST API transform parameter names and values - e.g. param1,value1,param2,value2." ) private String transformParams; @Parameter( - names = "--transformParamsDelimiter", - description = "Delimiter to use instead of a comma for the '--transformParams' parameter." + names = "--transform-params-delimiter", + description = "Delimiter to use instead of a comma for the '--transform-params' parameter." ) private String transformParamsDelimiter; @Parameter( - names = "--uriPrefix", + names = "--uri-prefix", description = "String to prepend to each document URI." ) private String uriPrefix; @Parameter( - names = "--uriReplace", + names = "--uri-replace", description = "Modify the URI for a document via a comma-delimited list of regular expression " + "and replacement string pairs - e.g. regex,'value',regex,'value'. Each replacement string must be enclosed by single quotes." ) private String uriReplace; @Parameter( - names = "--uriSuffix", + names = "--uri-suffix", description = "String to append to each document URI." ) private String uriSuffix; @Parameter( - names = "--uriTemplate", + names = "--uri-template", description = "String defining a template for constructing each document URI. " + "See https://marklogic.github.io/marklogic-spark-connector/writing.html for more information." ) diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteStructuredDocumentParams.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteStructuredDocumentParams.java index 17e16a85..a661c6e7 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteStructuredDocumentParams.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/importdata/WriteStructuredDocumentParams.java @@ -13,20 +13,20 @@ public class WriteStructuredDocumentParams extends WriteDocumentParams implements WriteStructuredDocumentsOptions { @Parameter( - names = "--jsonRootName", + names = "--json-root-name", description = "Name of a root field to add to each JSON document." ) private String jsonRootName; @Parameter( - names = "--xmlRootName", + names = "--xml-root-name", description = "Causes XML documents to be written instead of JSON, with the documents having a root element with this name." ) private String xmlRootName; @Parameter( - names = "--xmlNamespace", - description = "Namespace for the root element of XML documents as specified by '--xmlRootName'." + names = "--xml-namespace", + description = "Namespace for the root element of XML documents as specified by '--xml-root-name'." 
) private String xmlNamespace; diff --git a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/reprocess/ReprocessCommand.java b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/reprocess/ReprocessCommand.java index 7113956c..6017dab5 100644 --- a/new-tool-cli/src/main/java/com/marklogic/newtool/impl/reprocess/ReprocessCommand.java +++ b/new-tool-cli/src/main/java/com/marklogic/newtool/impl/reprocess/ReprocessCommand.java @@ -64,7 +64,7 @@ public void validate(Map<String, Object> params) throws ParameterException { private void validateReadParams(Map<String, Object> params) { String[] readParams = new String[]{ - "--readInvoke", "--readJavascript", "--readXquery", "--readJavascriptFile", "--readXqueryFile" + "--read-invoke", "--read-javascript", "--read-xquery", "--read-javascript-file", "--read-xquery-file" }; if (getCountOfNonNullParams(params, readParams) != 1) { throw new ParameterException(makeErrorMessage("Must specify one of ", readParams)); @@ -73,8 +73,8 @@ private void validateReadParams(Map<String, Object> params) { private void validatePartitionParams(Map<String, Object> params) { String[] partitionParams = new String[]{ - "--readPartitionsInvoke", "--readPartitionsJavascript", "--readPartitionsXquery", - "--readPartitionsJavascriptFile", "--readPartitionsXqueryFile" + "--read-partitions-invoke", "--read-partitions-javascript", "--read-partitions-xquery", + "--read-partitions-javascript-file", "--read-partitions-xquery-file" }; if (getCountOfNonNullParams(params, partitionParams) > 1) { throw new ParameterException(makeErrorMessage("Can only specify one of ", partitionParams)); @@ -86,7 +86,7 @@ private void validateWriteParams(Map<String, Object> params) { return; } String[] writeParams = new String[]{ - "--writeInvoke", "--writeJavascript", "--writeXquery", "--writeJavascriptFile", "--writeXqueryFile" + "--write-invoke", "--write-javascript", "--write-xquery", "--write-javascript-file", "--write-xquery-file" }; if (getCountOfNonNullParams(params, writeParams) != 1) { throw new ParameterException(makeErrorMessage("Must specify one of ", writeParams)); @@ -113,68 +113,68 @@ private int getCountOfNonNullParams(Map<String, Object> params, String... paramN public static class ReadParams implements Supplier<Map<String, String>>, ReadOptions { @Parameter( - names = {"--readInvoke"}, + names = {"--read-invoke"}, description = "The path to a module to invoke for reading data; the module must be in your application’s modules database." ) private String readInvoke; @Parameter( - names = {"--readJavascript"}, + names = {"--read-javascript"}, description = "JavaScript code to execute for reading data." ) private String readJavascript; @Parameter( - names = {"--readJavascriptFile"}, + names = {"--read-javascript-file"}, description = "Local file containing JavaScript code to execute for reading data." ) private String readJavascriptFile; @Parameter( - names = {"--readXquery"}, + names = {"--read-xquery"}, description = "XQuery code to execute for reading data." ) private String readXquery; @Parameter( - names = {"--readXqueryFile"}, + names = {"--read-xquery-file"}, description = "Local file containing XQuery code to execute for reading data." ) private String readXqueryFile; @Parameter( - names = {"--readPartitionsInvoke"}, + names = {"--read-partitions-invoke"}, description = "The path to a module to invoke to define partitions that are sent to your custom code for reading; the module must be in your application’s modules database."
) private String readPartitionsInvoke; @Parameter( - names = {"--readPartitionsJavascript"}, + names = {"--read-partitions-javascript"}, description = "JavaScript code to execute to define partitions that are sent to your custom code for reading." ) private String readPartitionsJavascript; @Parameter( - names = {"--readPartitionsJavascriptFile"}, + names = {"--read-partitions-javascript-file"}, description = "Local file containing JavaScript code to execute to define partitions that are sent to your custom code for reading." ) private String readPartitionsJavascriptFile; @Parameter( - names = {"--readPartitionsXquery"}, + names = {"--read-partitions-xquery"}, description = "XQuery code to execute to define partitions that are sent to your custom code for reading." ) private String readPartitionsXquery; @Parameter( - names = {"--readPartitionsXqueryFile"}, + names = {"--read-partitions-xquery-file"}, description = "Local file containing XQuery code to execute to define partitions that are sent to your custom code for reading." ) private String readPartitionsXqueryFile; @Parameter( - names = "--readVar", variableArity = true, - description = "Define variables to be sent to the code for reading data; e.g. '--readVar var1=value1'." + names = "--read-var", variableArity = true, + description = "Define variables to be sent to the code for reading data; e.g. '--read-var var1=value1'." ) private List<String> readVars = new ArrayList<>(); @@ -215,7 +215,7 @@ public Map<String, String> get() { readVars.forEach(readVar -> { int pos = readVar.indexOf("="); if (pos < 0) { - throw new IllegalArgumentException("Value of --readVar argument must be 'varName=varValue'; invalid value: " + readVar); + throw new IllegalArgumentException("Value of --read-var argument must be 'varName=varValue'; invalid value: " + readVar); } options.put(Options.READ_VARS_PREFIX + readVar.substring(0, pos), readVar.substring(pos + 1)); }); @@ -296,61 +296,61 @@ public ReadOptions vars(Map<String, String> namesAndValues) { public static class WriteParams implements Supplier<Map<String, String>>, WriteOptions { @Parameter( - names = {"--writeInvoke"}, + names = {"--write-invoke"}, description = "The path to a module to invoke for writing data; the module must be in your application’s modules database." ) private String writeInvoke; @Parameter( - names = {"--writeJavascript"}, + names = {"--write-javascript"}, description = "JavaScript code to execute for writing data." ) private String writeJavascript; @Parameter( - names = {"--writeJavascriptFile"}, + names = {"--write-javascript-file"}, description = "Local file containing JavaScript code to execute for writing data." ) private String writeJavascriptFile; @Parameter( - names = {"--writeXquery"}, + names = {"--write-xquery"}, description = "XQuery code to execute for writing data." ) private String writeXquery; @Parameter( - names = {"--writeXqueryFile"}, + names = {"--write-xquery-file"}, description = "Local file containing XQuery code to execute for writing data." ) private String writeXqueryFile; @Parameter( - names = {"--externalVariableName"}, + names = {"--external-variable-name"}, description = "Name of the external variable in the custom code for writing that will be populated with each value read from MarkLogic." ) private String externalVariableName = "URI"; @Parameter( - names = {"--externalVariableDelimiter"}, + names = {"--external-variable-delimiter"}, description = "Delimiter used when multiple values are included in the external variable in the code for writing."
) private String externalVariableDelimiter = ","; @Parameter( - names = "--writeVar", variableArity = true, - description = "Define variables to be sent to the code for writing data; e.g. '--writeVar var1=value1'." + names = "--write-var", variableArity = true, + description = "Define variables to be sent to the code for writing data; e.g. '--write-var var1=value1'." ) private List<String> writeVars = new ArrayList<>(); @Parameter( - names = "--abortOnWriteFailure", + names = "--abort-on-write-failure", description = "Include this option to cause the command to fail when a batch of documents cannot be written to MarkLogic." ) private Boolean abortOnWriteFailure; @Parameter( - names = "--batchSize", + names = "--batch-size", description = "The number of values sent to the code for writing data in a single call." ) private Integer batchSize = 1; @@ -381,7 +381,7 @@ public Map<String, String> get() { writeVars.forEach(writeVar -> { int pos = writeVar.indexOf("="); if (pos < 0) { - throw new IllegalArgumentException("Value of --writeVar argument must be 'varName=varValue'; invalid value: " + writeVar); + throw new IllegalArgumentException("Value of --write-var argument must be 'varName=varValue'; invalid value: " + writeVar); } options.put(Options.WRITE_VARS_PREFIX + writeVar.substring(0, pos), writeVar.substring(pos + 1)); }); diff --git a/new-tool-cli/src/main/resources/marklogic-spark-messages_en.properties b/new-tool-cli/src/main/resources/marklogic-spark-messages_en.properties index 06c6159d..7e0b7f7b 100644 --- a/new-tool-cli/src/main/resources/marklogic-spark-messages_en.properties +++ b/new-tool-cli/src/main/resources/marklogic-spark-messages_en.properties @@ -2,18 +2,19 @@ # Overridden here so that the user sees meaningful CLI option names instead of meaningless connector option names. # The use of "_en" allows this to take precedence over the marklogic-spark-messages.properties file in the connector, # while still inheriting anything from that file that is not overridden.
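# Sketch of the standard ResourceBundle resolution this relies on:
# ResourceBundle.getBundle("marklogic-spark-messages", Locale.ENGLISH) resolves to this "_en"
# bundle with the connector's base bundle as its parent, so an overridden key such as
# spark.marklogic.client.uri yields "--connection-string", while any key not overridden below
# falls through to the connector's own message.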
-spark.marklogic.client.uri=--connectionString -spark.marklogic.read.batchSize=--batchSize -spark.marklogic.read.documents.partitionsPerForest=--partitionsPerForest +spark.marklogic.client.uri=--connection-string +spark.marklogic.read.batchSize=--batch-size +spark.marklogic.read.documents.partitionsPerForest=--partitions-per-forest spark.marklogic.read.numPartitions=--partitions -spark.marklogic.write.batchSize=--batchSize -spark.marklogic.write.documentType=--documentType -spark.marklogic.write.fileRows.documentType=--documentType +spark.marklogic.write.batchSize=--batch-size +spark.marklogic.write.documentType=--document-type +spark.marklogic.write.fileRows.documentType=--document-type spark.marklogic.write.graph=--graph -spark.marklogic.write.graphOverride=--graphOverride -spark.marklogic.write.jsonRootName=--jsonRootName -spark.marklogic.write.threadCount=--threadCount -spark.marklogic.write.totalThreadCount=--totalThreadCount -spark.marklogic.write.transformParams=--transformParams -spark.marklogic.write.uriTemplate=--uriTemplate -spark.marklogic.write.xmlRootName=--xmlRootName +spark.marklogic.write.graphOverride=--graph-override +spark.marklogic.write.jsonRootName=--json-root-name +spark.marklogic.write.threadCount=--thread-count +spark.marklogic.write.totalThreadCount=--total-thread-count +spark.marklogic.write.transformParams=--transform-params +spark.marklogic.write.uriTemplate=--uri-template +spark.marklogic.write.xmlRootName=--xml-root-name + diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/api/ConnectionTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/api/ConnectionTest.java index 2d5576f0..a4fde8a2 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/api/ConnectionTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/api/ConnectionTest.java @@ -14,7 +14,7 @@ *

* For the "missing value" tests, turns out that since our connector throws a good exception, we don't need any * additional support in the API. It's really just the connection string that we need to validate right away so that - * we can provide an error message that doesn't include "--connectionString" in it. + * we can provide an error message that doesn't include "--connection-string" in it. */ class ConnectionTest extends AbstractTest { diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/api/ParquetFilesExporterTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/api/ParquetFilesExporterTest.java index 923fb44a..99ec7a71 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/api/ParquetFilesExporterTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/api/ParquetFilesExporterTest.java @@ -57,7 +57,7 @@ void pathOnly(@TempDir Path tempDir) { private void verifyFiles(Path tempDir) { File[] files = tempDir.toFile().listFiles(file -> file.getName().endsWith(".gz.parquet")); - assertEquals(2, files.length, "Expecting 2 gzipped Parquet files since --fileCount is 2, and the " + + assertEquals(2, files.length, "Expecting 2 gzipped Parquet files since --file-count is 2, and the " + "-Pcompression option should tell Spark Parquet to use gzip instead of snappy."); } } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConfigureSparkMasterUrlTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConfigureSparkMasterUrlTest.java index 29179fd7..bb33580f 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConfigureSparkMasterUrlTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConfigureSparkMasterUrlTest.java @@ -25,7 +25,7 @@ void validMasterUrl() { "import-parquet-files", "--master-url", "local[2]", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test" ); @@ -40,7 +40,7 @@ void invalidMasterUrl() { "import-parquet-files", "--master-url", "just-not-valid-at-all", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS ), "Command failed, cause: Could not parse Master URL: 'just-not-valid-at-all'"); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConnectionParamsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConnectionParamsTest.java index ab55155e..3af11db6 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConnectionParamsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ConnectionParamsTest.java @@ -13,31 +13,31 @@ void allConnectionParams() { ImportFilesCommand command = (ImportFilesCommand) getCommand( "import-files", "--path", "/doesnt/matter/for-this-test", - "--connectionString", "user:password@host:8000", + "--connection-string", "user:password@host:8000", "--host", "localhost", "--port", "8123", - "--disableGzippedResponses", - "--basePath", "/path", + "--disable-gzipped-responses", + "--base-path", "/path", "--database", "somedb", - "--connectionType", "direct", - "--authType", "basic", + "--connection-type", "direct", + "--auth-type", "basic", "--username", "jane", "--password", "secret", - "--certificateFile", "my.jks", - "--certificatePassword", "pwd123", - "--cloudApiKey", "key123", - 
"--kerberosPrincipal", "prince123", - "--samlToken", "my-token", - "--sslProtocol", "TLSv1.3", - "--sslHostnameVerifier", "STRICT", - "--keyStorePath", "key.jks", - "--keyStorePassword", "keypass", - "--keyStoreType", "JKS", - "--keyStoreAlgorithm", "SunX509", - "--trustStorePath", "trust.jks", - "--trustStorePassword", "trustpass", - "--trustStoreType", "PKCS", - "--trustStoreAlgorithm", "SunX510" + "--certificate-file", "my.jks", + "--certificate-password", "pwd123", + "--cloud-api-key", "key123", + "--kerberos-principal", "prince123", + "--saml-token", "my-token", + "--ssl-protocol", "TLSv1.3", + "--ssl-hostname-verifier", "STRICT", + "--keystore-path", "key.jks", + "--keystore-password", "keypass", + "--keystore-type", "JKS", + "--keystore-algorithm", "SunX509", + "--truststore-path", "trust.jks", + "--truststore-password", "trustpass", + "--truststore-type", "PKCS", + "--truststore-algorithm", "SunX510" ); assertOptions(command.getConnectionParams().makeOptions(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ErrorMessagesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ErrorMessagesTest.java index 64e783c7..a460bce0 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ErrorMessagesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ErrorMessagesTest.java @@ -16,20 +16,20 @@ void verifyEachKeyIsOverridden() { "Expecting 15 keys as of the upcoming 2.3.0 release. Bump this up as more keys are added. Each key should " + "also be verified in an assertion below."); - assertEquals("--connectionString", bundle.getString(Options.CLIENT_URI)); - assertEquals("--batchSize", bundle.getString(Options.READ_BATCH_SIZE)); - assertEquals("--partitionsPerForest", bundle.getString(Options.READ_DOCUMENTS_PARTITIONS_PER_FOREST)); + assertEquals("--connection-string", bundle.getString(Options.CLIENT_URI)); + assertEquals("--batch-size", bundle.getString(Options.READ_BATCH_SIZE)); + assertEquals("--partitions-per-forest", bundle.getString(Options.READ_DOCUMENTS_PARTITIONS_PER_FOREST)); assertEquals("--partitions", bundle.getString(Options.READ_NUM_PARTITIONS)); - assertEquals("--batchSize", bundle.getString(Options.WRITE_BATCH_SIZE)); - assertEquals("--documentType", bundle.getString(Options.WRITE_DOCUMENT_TYPE)); - assertEquals("--documentType", bundle.getString(Options.WRITE_FILE_ROWS_DOCUMENT_TYPE)); + assertEquals("--batch-size", bundle.getString(Options.WRITE_BATCH_SIZE)); + assertEquals("--document-type", bundle.getString(Options.WRITE_DOCUMENT_TYPE)); + assertEquals("--document-type", bundle.getString(Options.WRITE_FILE_ROWS_DOCUMENT_TYPE)); assertEquals("--graph", bundle.getString(Options.WRITE_GRAPH)); - assertEquals("--graphOverride", bundle.getString(Options.WRITE_GRAPH_OVERRIDE)); - assertEquals("--jsonRootName", bundle.getString(Options.WRITE_JSON_ROOT_NAME)); - assertEquals("--threadCount", bundle.getString(Options.WRITE_THREAD_COUNT)); - assertEquals("--totalThreadCount", bundle.getString(Options.WRITE_TOTAL_THREAD_COUNT)); - assertEquals("--transformParams", bundle.getString(Options.WRITE_TRANSFORM_PARAMS)); - assertEquals("--uriTemplate", bundle.getString(Options.WRITE_URI_TEMPLATE)); - assertEquals("--xmlRootName", bundle.getString(Options.WRITE_XML_ROOT_NAME)); + assertEquals("--graph-override", bundle.getString(Options.WRITE_GRAPH_OVERRIDE)); + assertEquals("--json-root-name", bundle.getString(Options.WRITE_JSON_ROOT_NAME)); + assertEquals("--thread-count", bundle.getString(Options.WRITE_THREAD_COUNT)); + 
assertEquals("--total-thread-count", bundle.getString(Options.WRITE_TOTAL_THREAD_COUNT)); + assertEquals("--transform-params", bundle.getString(Options.WRITE_TRANSFORM_PARAMS)); + assertEquals("--uri-template", bundle.getString(Options.WRITE_URI_TEMPLATE)); + assertEquals("--xml-root-name", bundle.getString(Options.WRITE_XML_ROOT_NAME)); } } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ExportRdfFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ExportRdfFilesTest.java index 89e68c65..1563a74e 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ExportRdfFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ExportRdfFilesTest.java @@ -18,18 +18,18 @@ void importThenExportThenImport(@TempDir Path tempDir) { run( "import-rdf-files", "--path", "src/test/resources/rdf/englishlocale.ttl", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "my-triples" ); run( "export-rdf-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--graphs", "my-triples", "--path", tempDir.toFile().getAbsolutePath(), "--format", "nq", - "--fileCount", "1" + "--file-count", "1" ); File[] files = tempDir.toFile().listFiles(); @@ -40,7 +40,7 @@ void importThenExportThenImport(@TempDir Path tempDir) { run( "import-rdf-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "more-triples" ); @@ -58,12 +58,12 @@ void importThenExportThenImport(@TempDir Path tempDir) { void missingQueryInput(@TempDir Path tempDir) { String stderr = runAndReturnStderr(() -> run( "export-rdf-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--path", tempDir.toFile().getAbsolutePath() )); assertTrue(stderr.contains("Must specify at least one of the following options: " + - "[--graphs, --query, --uris, --stringQuery, --collections, --directory]."), + "[--graphs, --query, --uris, --string-query, --collections, --directory]."), "Unexpected stderr: " + stderr); } } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/HandleErrorTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/HandleErrorTest.java index 26af7e42..dfda7bfa 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/HandleErrorTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/HandleErrorTest.java @@ -15,7 +15,7 @@ class HandleErrorTest extends AbstractTest { @Test void invalidCommand() { assertStderrContains( - () -> run("not_a_real_command", "--connectionString", makeConnectionString()), + () -> run("not_a_real_command", "--connection-string", makeConnectionString()), "Invalid command name: not_a_real_command" ); } @@ -47,7 +47,7 @@ void badDynamicOption() { @Test void missingRequiredParam() { assertStderrContains( - () -> run("import-files", "--connectionString", makeConnectionString()), + () -> run("import-files", "--connection-string", makeConnectionString()), "The following option is required: [--path]" ); } @@ -61,7 +61,7 @@ void sparkFailure() { () -> run( "import-files", "--path", "/not/valid", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() ), "Command failed, cause: [PATH_NOT_FOUND] Path does not exist: file:/not/valid." 
); @@ -74,9 +74,9 @@ void abortOnWriteFailure() { "import-files", "--path", "src/test/resources/mixed-files/hello*", "--repartition", "2", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", "invalid-role,read,rest-writer,update", - "--abortOnWriteFailure" + "--abort-on-write-failure" ), "Command failed, cause: Local message: failed to apply resource at documents" ); @@ -89,10 +89,10 @@ void abortOnWriteFailureAndShowStacktrace() { "import-files", "--path", "src/test/resources/mixed-files/hello*", "--repartition", "2", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", "invalid-role,read,rest-writer,update", "--stacktrace", - "--abortOnWriteFailure" + "--abort-on-write-failure" ), "com.marklogic.spark.ConnectorException: Local message: failed to apply resource at documents" ); @@ -105,7 +105,7 @@ void dontAbortOnWriteFailure() { "--path", "src/test/resources/mixed-files/hello*", // Using two partitions to verify that both partition writers log an error. "--repartition", "2", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", "invalid-role,read,rest-writer,update" )); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/LimitTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/LimitTest.java index 57a4675b..0d800746 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/LimitTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/LimitTest.java @@ -28,7 +28,7 @@ private void importFiles(int limit) { run( "import-files", "--path", "src/test/resources/mixed-files/hello*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", COLLECTION, "--limit", Integer.toString(limit) diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ValidateMarkLogicConnectionTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ValidateMarkLogicConnectionTest.java index 76238a86..b8f20559 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ValidateMarkLogicConnectionTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/ValidateMarkLogicConnectionTest.java @@ -14,7 +14,7 @@ class ValidateMarkLogicConnectionTest extends AbstractTest { void noHost() { assertStderrContains( () -> run("import-files", "--path", "src/test/resources/mixed-files"), - "Must specify a MarkLogic host via --host or --connectionString." + "Must specify a MarkLogic host via --host or --connection-string." ); } @@ -22,7 +22,7 @@ void noHost() { void noPort() { assertStderrContains( () -> run("import-files", "--path", "src/test/resources/mixed-files", "--host", getDatabaseClient().getHost()), - "Must specify a MarkLogic app server port via --port or --connectionString." + "Must specify a MarkLogic app server port via --port or --connection-string." 
); } @@ -43,10 +43,10 @@ void badConnectionString() { String output = runAndReturnStderr(() -> run( "import-files", "--path", "src/test/resources/mixed-files", - "--connectionString", "admin-missing-password@localhost:8003" + "--connection-string", "admin-missing-password@localhost:8003" )); - assertTrue(output.contains("Invalid value for --connectionString; must be username:password@host:port"), + assertTrue(output.contains("Invalid value for --connection-string; must be username:password@host:port"), "Unexpected output: " + output + "; this test also confirms that the ETL tool is overriding " + "error messages from the connector so that CLI option names appear instead of connector " + "option names. This is also confirmed by ErrorMessagesTest."); @@ -57,10 +57,10 @@ void connectionStringWithoutUserOrPassword() { String output = runAndReturnStderr(() -> run( "import-files", "--path", "src/test/resources/mixed-files", - "--connectionString", "localhost:8003" + "--connection-string", "localhost:8003" )); - assertTrue(output.contains("Invalid value for --connectionString; must be username:password@host:port"), + assertTrue(output.contains("Invalid value for --connection-string; must be username:password@host:port"), "Unexpected output: " + output); } } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/VerifyOptionNamesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/VerifyOptionNamesTest.java new file mode 100644 index 00000000..5249b5e5 --- /dev/null +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/VerifyOptionNamesTest.java @@ -0,0 +1,79 @@ +package com.marklogic.newtool.impl; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.util.FileCopyUtils; + +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +class VerifyOptionNamesTest { + + /** + * Verifies that the files in the listed directories do not contain any option names that start with "--" + * and contain uppercase characters. All option names are expected to be lowercase-hyphenated.
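+ * For example, a leftover "--connectionString" should cause this test to fail, while the
+ * renamed "--connection-string" passes.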
+ * + * @throws IOException + */ + @ParameterizedTest + @ValueSource(strings = { + "src", + "../docs", + "../client-project", + "../examples", + "../mlcp-testing" + }) + void test(String path) throws IOException { + final File dir = new File(path); + AtomicInteger count = new AtomicInteger(0); + Files.walkFileTree(dir.toPath(), new FileVisitor<>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException { + try (FileReader reader = new FileReader(path.toFile())) { + String text = FileCopyUtils.copyToString(reader); + Pattern pattern = Pattern.compile("\\-\\-[a-zA-Z]+"); + Matcher matcher = pattern.matcher(text); + while (matcher.find()) { + String group = matcher.group(); + for (int i = 0; i < group.length(); i++) { + if (Character.isUpperCase(group.charAt(i))) { + throw new RuntimeException(String.format("Found option starting with '--' that " + + "contains an uppercase character: %s; file: %s", group, path.toFile())); + } + } + count.incrementAndGet(); + } + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFileFailed(Path path, IOException exc) throws IOException { + throw exc; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + return FileVisitResult.CONTINUE; + } + }); + System.out.println(String.format("Found %d options in directory: %s", count.get(), dir)); + assertTrue(count.get() > 0); + } +} diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyOptionsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyOptionsTest.java index 413e2ed4..8f425f5a 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyOptionsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyOptionsTest.java @@ -15,8 +15,8 @@ class CopyOptionsTest extends AbstractOptionsTest { @Test void useOutputParamsForConnection() { CopyCommand command = (CopyCommand) getCommand("copy", - "--connectionString", "test:test@test:8000", - "--outputConnectionString", "user:password@host:8000", + "--connection-string", "test:test@test:8000", + "--output-connection-string", "user:password@host:8000", "--collections", "anything" ); @@ -29,7 +29,7 @@ void useOutputParamsForConnection() { @Test void useRegularConnectionParamsIfNoOutputConnectionParams() { CopyCommand command = (CopyCommand) getCommand("copy", - "--connectionString", "test:test@test:8000", + "--connection-string", "test:test@test:8000", "--collections", "anything" ); @@ -42,22 +42,22 @@ void useRegularConnectionParamsIfNoOutputConnectionParams() { @Test void allWriteParams() { CopyCommand command = (CopyCommand) getCommand("copy", - "--connectionString", "someone:word@somehost:7000", + "--connection-string", "someone:word@somehost:7000", "--collections", "anything", - "--outputAbortOnWriteFailure", - "--outputBatchSize", "123", - "--outputCollections", "c1,c2", - "--outputFailedDocumentsPath", "/my/failures", - "--outputPermissions", "rest-reader,read,qconsole-user,update", - "--outputTemporalCollection", "t1", - "--outputThreadCount", "7", - "--outputTransform", "transform1", - "--outputTransformParams", "p1;v1;p2;v2", - "--outputTransformParamsDelimiter", ";", - "--outputUriPrefix", "/prefix/", - "--outputUriReplace", ".*data,''", - "--outputUriSuffix", ".xml", - "--outputUriTemplate", "/{example}.xml" + 
"--output-abort-on-write-failure", + "--output-batch-size", "123", + "--output-collections", "c1,c2", + "--output-failed-documents-path", "/my/failures", + "--output-permissions", "rest-reader,read,qconsole-user,update", + "--output-temporal-collection", "t1", + "--output-thread-count", "7", + "--output-transform", "transform1", + "--output-transform-params", "p1;v1;p2;v2", + "--output-transform-params-delimiter", ";", + "--output-uri-prefix", "/prefix/", + "--output-uri-replace", ".*data,''", + "--output-uri-suffix", ".xml", + "--output-uri-template", "/{example}.xml" ); assertOptions(command.writeParams.makeOptions(), @@ -82,32 +82,32 @@ void allWriteParams() { void allOutputConnectionParams() { CopyCommand command = (CopyCommand) getCommand( "copy", - "--connectionString", "someone:word@somehost:7000", + "--connection-string", "someone:word@somehost:7000", "--collections", "anything", - "--outputConnectionString", "user:password@host:8000", - "--outputHost", "localhost", - "--outputPort", "8123", - "--outputBasePath", "/path", - "--outputDatabase", "somedb", - "--outputConnectionType", "direct", - "--outputAuthType", "basic", - "--outputUsername", "jane", - "--outputPassword", "secret", - "--outputCertificateFile", "my.jks", - "--outputCertificatePassword", "pwd123", - "--outputCloudApiKey", "key123", - "--outputKerberosPrincipal", "prince123", - "--outputSamlToken", "my-token", - "--outputSslProtocol", "TLSv1.3", - "--outputSslHostnameVerifier", "STRICT", - "--outputKeyStorePath", "key.jks", - "--outputKeyStorePassword", "keypass", - "--outputKeyStoreType", "JKS", - "--outputKeyStoreAlgorithm", "SunX509", - "--outputTrustStorePath", "trust.jks", - "--outputTrustStorePassword", "trustpass", - "--outputTrustStoreType", "PKCS", - "--outputTrustStoreAlgorithm", "SunX510" + "--output-connection-string", "user:password@host:8000", + "--output-host", "localhost", + "--output-port", "8123", + "--output-base-path", "/path", + "--output-database", "somedb", + "--output-connection-type", "direct", + "--output-auth-type", "basic", + "--output-username", "jane", + "--output-password", "secret", + "--output-certificate-file", "my.jks", + "--output-certificate-password", "pwd123", + "--output-cloud-api-key", "key123", + "--output-kerberos-principal", "prince123", + "--output-saml-token", "my-token", + "--output-ssl-protocol", "TLSv1.3", + "--output-ssl-hostname-verifier", "STRICT", + "--output-keystore-path", "key.jks", + "--output-keystore-password", "keypass", + "--output-keystore-type", "JKS", + "--output-keystore-algorithm", "SunX509", + "--output-truststore-path", "trust.jks", + "--output-truststore-password", "trustpass", + "--output-truststore-type", "PKCS", + "--output-truststore-algorithm", "SunX510" ); assertOptions(command.makeOutputConnectionOptions(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyTest.java index a4d77a26..886e21c0 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/copy/CopyTest.java @@ -16,10 +16,10 @@ void sameDatabase() { "copy", "--categories", "content", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputCollections", "author-copies", - "--outputUriPrefix", "/copied", - "--outputPermissions", DEFAULT_PERMISSIONS + "--connection-string", makeConnectionString(), + "--output-collections", "author-copies", + "--output-uri-prefix", 
"/copied", + "--output-permissions", DEFAULT_PERMISSIONS ); assertCollectionSize("author", 15); @@ -33,10 +33,10 @@ void withUris() { "copy", "--categories", "content", "--uris", "/author/author1.json\n/author/author2.json", - "--connectionString", makeConnectionString(), - "--outputCollections", "author-copies", - "--outputUriPrefix", "/copied", - "--outputPermissions", DEFAULT_PERMISSIONS + "--connection-string", makeConnectionString(), + "--output-collections", "author-copies", + "--output-uri-prefix", "/copied", + "--output-permissions", DEFAULT_PERMISSIONS ); assertCollectionSize("author", 15); @@ -49,13 +49,13 @@ void sameDatabaseWithMetadata() { run( "copy", "--collections", "author", - "--partitionsPerForest", "1", + "--partitions-per-forest", "1", "--categories", "content,metadata", - "--connectionString", makeConnectionString(), - "--outputConnectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), + "--output-connection-string", makeConnectionString(), // No need to specify permissions since they are included via "--categories". - "--outputCollections", "author-copies", - "--outputUriPrefix", "/copied" + "--output-collections", "author-copies", + "--output-uri-prefix", "/copied" ); assertCollectionSize("author", 15); @@ -68,9 +68,9 @@ void badConnectionString() { assertStderrContains(() -> run( "copy", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputConnectionString", "not@valid" - ), "Invalid value for --outputConnectionString; must be username:password@host:port/optionalDatabaseName"); + "--connection-string", makeConnectionString(), + "--output-connection-string", "not@valid" + ), "Invalid value for --output-connection-string; must be username:password@host:port/optionalDatabaseName"); } @Test @@ -78,9 +78,9 @@ void missingHost() { assertStderrContains(() -> run( "copy", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputPort", "8000" - ), "Must specify a MarkLogic host via --outputHost or --outputConnectionString."); + "--connection-string", makeConnectionString(), + "--output-port", "8000" + ), "Must specify a MarkLogic host via --output-host or --output-connection-string."); } @Test @@ -88,9 +88,9 @@ void missingPort() { assertStderrContains(() -> run( "copy", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputHost", "localhost" - ), "Must specify a MarkLogic app server port via --outputPort or --outputConnectionString."); + "--connection-string", makeConnectionString(), + "--output-host", "localhost" + ), "Must specify a MarkLogic app server port via --output-port or --output-connection-string."); } @Test @@ -98,10 +98,10 @@ void missingUsername() { assertStderrContains(() -> run( "copy", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputHost", "localhost", - "--outputPort", "8000" - ), "Must specify a MarkLogic user via --outputUsername when using 'BASIC' or 'DIGEST' authentication."); + "--connection-string", makeConnectionString(), + "--output-host", "localhost", + "--output-port", "8000" + ), "Must specify a MarkLogic user via --output-username when using 'BASIC' or 'DIGEST' authentication."); } @Test @@ -109,11 +109,11 @@ void missingPassword() { assertStderrContains(() -> run( "copy", "--collections", "author", - "--connectionString", makeConnectionString(), - "--outputHost", "localhost", - "--outputPort", "8000", - "--outputUsername", "someone" - ), "Must specify a password via 
--outputPassword when using 'BASIC' or 'DIGEST' authentication."); + "--connection-string", makeConnectionString(), + "--output-host", "localhost", + "--output-port", "8000", + "--output-username", "someone" + ), "Must specify a password via --output-password when using 'BASIC' or 'DIGEST' authentication."); } private void assertDirectoryCount(String directoryPrefix, int expectedCount) { diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportDocumentsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportDocumentsTest.java index 0464fd15..b00b8672 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportDocumentsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportDocumentsTest.java @@ -12,7 +12,7 @@ class CustomExportDocumentsTest extends AbstractTest { void test() { run( "custom-export-documents", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author", "--target", "marklogic", String.format("-P%s=%s", Options.CLIENT_URI, makeConnectionString()), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportRowsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportRowsTest.java index 7cfe11e9..a6ad0d00 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportRowsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomExportRowsTest.java @@ -17,7 +17,7 @@ class CustomExportRowsTest extends AbstractTest { void sparkXml(@TempDir Path tempDir) throws IOException { run( "custom-export-rows", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--target", "xml", "--repartition", "1", diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomImportTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomImportTest.java index 5d851469..a1b775b6 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomImportTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/custom/CustomImportTest.java @@ -20,12 +20,12 @@ void parquet() { "custom-import", "--source", "parquet", "-Ppath=src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test", - "--xmlRootName", "my-parquet", - "--xmlNamespace", "org:example", - "--uriTemplate", "/parquet/{model}.xml" + "--xml-root-name", "my-parquet", + "--xml-namespace", "org:example", + "--uri-template", "/parquet/{model}.xml" ); assertCollectionSize("parquet-test", 32); @@ -40,11 +40,11 @@ void avro() { "custom-import", "--source", "avro", "-Ppath=src/test/resources/avro", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-test", - "--uriTemplate", "/avro/{/avroData/color}.json", - "--jsonRootName", "avroData" + "--uri-template", "/avro/{/avroData/color}.json", + "--json-root-name", "avroData" ); assertCollectionSize("avro-test", 6); @@ -60,10 +60,10 @@ void csvWithDynamicParam() { "-Ppath=src/test/resources/delimited-files/semicolon-delimiter.csv", "-Pdelimiter=;", "-Pheader=true", - "--connectionString", makeConnectionString(), + 
"--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "csv-test", - "--uriTemplate", "/csv/{number}.json" + "--uri-template", "/csv/{number}.json" ); assertCollectionSize("csv-test", 3); @@ -82,10 +82,10 @@ void sparkXml() { "--source", "xml", "-Ppath=src/test/resources/xml-file/people.xml", "-ProwTag=person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "spark-data", - "--uriTemplate", "/company/{company}.json" + "--uri-template", "/company/{company}.json" ); assertCollectionSize("spark-data", 3); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportArchiveFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportArchiveFilesTest.java index 4babf01c..1f745249 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportArchiveFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportArchiveFilesTest.java @@ -15,9 +15,9 @@ class ExportArchiveFilesTest extends AbstractTest { void test(@TempDir Path tempDir) { run( "export-archive-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author", - "--fileCount", "1", + "--file-count", "1", "--path", tempDir.toFile().getAbsolutePath() ); @@ -29,9 +29,9 @@ void test(@TempDir Path tempDir) { run( "import-archive-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "imported-author", - "--uriPrefix", "/imported" + "--uri-prefix", "/imported" ); assertCollectionSize("Being able to read these URIs verifies that the metadata was exported and imported " + @@ -46,7 +46,7 @@ void test(@TempDir Path tempDir) { void contentShouldAlwaysBeIncluded(@TempDir Path tempDir) { String stderr = runAndReturnStderr(() -> run( "export-archive-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author", "--path", tempDir.toFile().getAbsolutePath(), "--categories", "collections,permissions" @@ -58,9 +58,9 @@ void contentShouldAlwaysBeIncluded(@TempDir Path tempDir) { run( "import-archive-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "imported-author", - "--uriPrefix", "/imported" + "--uri-prefix", "/imported" ); assertCollectionSize("The export command should always include content, even when --categories is used " + diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportAvroFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportAvroFilesTest.java index 994adb0c..c36f8502 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportAvroFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportAvroFilesTest.java @@ -15,7 +15,7 @@ class ExportAvroFilesTest extends AbstractTest { void test(@TempDir Path tempDir) { run( "export-avro-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--partitions", "4", "--path", tempDir.toFile().getAbsolutePath() @@ -29,7 +29,7 @@ void test(@TempDir Path tempDir) { run( "import-avro-files", "--path", 
tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-test" ); @@ -48,7 +48,7 @@ void test(@TempDir Path tempDir) { void dynamicParameter(@TempDir Path tempDir) { String stderr = runAndReturnStderr(() -> run( "export-avro-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--partitions", "2", "--path", tempDir.toFile().getAbsolutePath(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportDelimitedFilesCommandTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportDelimitedFilesCommandTest.java index 17923444..41e84bd4 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportDelimitedFilesCommandTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportDelimitedFilesCommandTest.java @@ -18,11 +18,11 @@ class ExportDelimitedFilesCommandTest extends AbstractTest { void test(@TempDir Path tempDir) throws IOException { run( "export-delimited-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--partitions", "1", "--query", "op.fromView('Medical', 'Authors', '').orderBy(op.asc(op.col('LastName')))", "--path", tempDir.toFile().getAbsolutePath(), - "--fileCount", "1" + "--file-count", "1" ); File[] files = tempDir.toFile().listFiles((dir, name) -> name.endsWith(".csv")); @@ -39,12 +39,12 @@ void test(@TempDir Path tempDir) throws IOException { void headerRemovedViaDynamicParam(@TempDir Path tempDir) throws IOException { run( "export-delimited-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--partitions", "1", "--query", "op.fromView('Medical', 'Authors', '').orderBy(op.asc(op.col('LastName')))", "--path", tempDir.toFile().getAbsolutePath(), "-Pheader=false", - "--fileCount", "1" + "--file-count", "1" ); File[] files = tempDir.toFile().listFiles((dir, name) -> name.endsWith(".csv")); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesOptionsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesOptionsTest.java index 123dd7ac..11a60017 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesOptionsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesOptionsTest.java @@ -15,10 +15,10 @@ class ExportFilesOptionsTest extends AbstractOptionsTest { void prettyPrint() { ExportFilesCommand command = (ExportFilesCommand) getCommand( "export-files", - "--connectionString", "test:test@host:8000", + "--connection-string", "test:test@host:8000", "--collections", "anything", "--path", "anywhere", - "--prettyPrint", + "--pretty-print", "--compression", "gzip" ); @@ -31,7 +31,7 @@ void prettyPrint() { void dontPrettyPrint() { ExportFilesCommand command = (ExportFilesCommand) getCommand( "export-files", - "--connectionString", "test:test@host:8000", + "--connection-string", "test:test@host:8000", "--collections", "anything", "--path", "anywhere", "--compression", "zip" diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesTest.java index 0119c224..c0325fa8 100644 --- 
a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportFilesTest.java @@ -25,11 +25,11 @@ void exportToRegularFiles(@TempDir Path tempDir) throws Exception { run( "export-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author", // Including this simply to verify that it doesn't cause an error. Its impact is only going to be seen // in performance tests. - "--batchSize", "5" + "--batch-size", "5" ); File dir = tempDir.toFile(); @@ -53,7 +53,7 @@ void exportViaUris(@TempDir Path tempDir) { run( "export-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--uris", "/author/author1.json\n/author/author2.json" ); @@ -67,10 +67,10 @@ void exportViaUris(@TempDir Path tempDir) { void exportToZips(@TempDir Path tempDir) { run( "export-files", - "--partitionsPerForest", "1", + "--partitions-per-forest", "1", "--path", tempDir.toFile().getAbsolutePath(), "--compression", "zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author" ); @@ -104,13 +104,13 @@ void exportToZipsWithRepartition(@TempDir Path tempDir) { "export-files", "--path", tempDir.toFile().getAbsolutePath(), "--compression", "zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author", - "--zipFileCount", "5" + "--zip-file-count", "5" ); File dir = tempDir.toFile(); - assertEquals(5, dir.listFiles().length, "Should have 5 zip files instead of 3 due to the use of --zipFileCount."); + assertEquals(5, dir.listFiles().length, "Should have 5 zip files instead of 3 due to the use of --zip-file-count."); } @Test @@ -119,7 +119,7 @@ void exportToGZippedFiles(@TempDir Path tempDir) { "export-files", "--path", tempDir.toFile().getAbsolutePath(), "--compression", "gzip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "author" ); @@ -141,12 +141,12 @@ void exportWithNoQuery(@TempDir Path tempDir) { "export-files", "--path", tempDir.toFile().getAbsolutePath(), "--compression", "gzip", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() ); }); assertTrue( - stderr.contains("Must specify at least one of the following options: [--query, --uris, --stringQuery, --collections, --directory]."), + stderr.contains("Must specify at least one of the following options: [--query, --uris, --string-query, --collections, --directory]."), "Unexpected stderr: " + stderr ); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJdbcTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJdbcTest.java index d8139e6b..6808d4aa 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJdbcTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJdbcTest.java @@ -66,10 +66,10 @@ void ignore() { void dynamicParam() { run( "export-jdbc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", "this should be overwritten by the dynamic param", + "--jdbc-url", 
PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", "this should be overwritten by the dynamic param", "--table", EXPORTED_TABLE_NAME, "-Pdriver=" + PostgresUtil.DRIVER ); @@ -85,10 +85,10 @@ private void exportFifteenAuthors() { private void exportFifteenAuthorsWithMode(String saveMode) { run( "export-jdbc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", PostgresUtil.DRIVER, "--table", EXPORTED_TABLE_NAME, "--mode", saveMode ); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJsonLinesFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJsonLinesFilesTest.java index bcd01aaf..3857c3f6 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJsonLinesFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportJsonLinesFilesTest.java @@ -15,9 +15,9 @@ class ExportJsonLinesFilesTest extends AbstractTest { void test(@TempDir Path tempDir) throws Exception { run( "export-json-lines-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, - "--fileCount", "2", + "--file-count", "2", "--path", tempDir.toFile().getAbsolutePath() ); @@ -28,10 +28,10 @@ void test(@TempDir Path tempDir) throws Exception { run( "import-json-files", "--path", tempDir.toFile().getAbsolutePath(), - "--jsonLines", - "--connectionString", makeConnectionString(), + "--json-lines", + "--connection-string", makeConnectionString(), "--collections", "imported-json", - "--uriTemplate", "/imported/{LastName}.json", + "--uri-template", "/imported/{LastName}.json", "--permissions", DEFAULT_PERMISSIONS ); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportOrcFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportOrcFilesTest.java index b29c09d9..5a924b82 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportOrcFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportOrcFilesTest.java @@ -16,7 +16,7 @@ class ExportOrcFilesTest extends AbstractTest { void test(@TempDir Path tempDir) { run( "export-orc-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--partitions", "4", "--path", tempDir.toFile().getAbsolutePath() @@ -30,7 +30,7 @@ void test(@TempDir Path tempDir) { run( "import-orc-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "orc-test" ); @@ -42,7 +42,7 @@ void test(@TempDir Path tempDir) { void dynamicParameter(@TempDir Path tempDir) { run( "export-orc-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--partitions", "1", "--path", tempDir.toFile().getAbsolutePath(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportParquetFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportParquetFilesTest.java index 6a9d5cbc..14310a5d 100644 --- 
a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportParquetFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportParquetFilesTest.java @@ -17,7 +17,7 @@ class ExportParquetFilesTest extends AbstractTest { void test(@TempDir Path tempDir) { run( "export-parquet-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--path", tempDir.toFile().getAbsolutePath() ); @@ -29,7 +29,7 @@ void test(@TempDir Path tempDir) { run( "import-parquet-files", "--path", tempDir.toFile().getAbsolutePath(), - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test" ); @@ -41,7 +41,7 @@ void saveMode(@TempDir Path tempDir) { String stderr = runAndReturnStderr(() -> run( "export-parquet-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--path", tempDir.toFile().getAbsolutePath(), "--mode", SaveMode.ERRORIFEXISTS.name() @@ -55,16 +55,16 @@ void dynamicParameter(@TempDir Path tempDir) { run( "export-parquet-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--query", READ_AUTHORS_OPTIC_QUERY, "--partitions", "2", - "--fileCount", "2", + "--file-count", "2", "--path", tempDir.toFile().getAbsolutePath(), "-Pcompression=gzip" ); File[] files = tempDir.toFile().listFiles(file -> file.getName().endsWith(".gz.parquet")); - assertEquals(2, files.length, "Expecting 2 gzipped Parquet files since --fileCount is 2, and the " + + assertEquals(2, files.length, "Expecting 2 gzipped Parquet files since --file-count is 2, and the " + "-Pcompression option should tell Spark Parquet to use gzip instead of snappy."); } } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportRdfFilesOptionsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportRdfFilesOptionsTest.java index 3b5baad9..7182a926 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportRdfFilesOptionsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/export/ExportRdfFilesOptionsTest.java @@ -14,19 +14,19 @@ void test() { ExportRdfFilesCommand command = (ExportRdfFilesCommand) getCommand( "export-rdf-files", - "--connectionString", "test:test@host:8000", + "--connection-string", "test:test@host:8000", "--uris", "/a1.json\n/a2.json", - "--stringQuery", "hello", + "--string-query", "hello", "--query", "", "--graphs", "g1,g2", "--collections", "c1,c2", "--directory", "/dir/", "--options", "my-options", - "--batchSize", "50", - "--partitionsPerForest", "2", + "--batch-size", "50", + "--partitions-per-forest", "2", "--path", "anywhere", "--format", "trig", - "--graphOverride", "use-this-graph" + "--graph-override", "use-this-graph" ); Map<String, String> options = command.readParams.get(); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlFilesTest.java index ffd23ada..7caec208 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlFilesTest.java +++ 
b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAggregateXmlFilesTest.java @@ -20,7 +20,7 @@ void elementIsRequired() { String stderr = runAndReturnStderr(() -> run( "import-aggregate-xml-files", "--path", "src/test/resources/xml-file/people.xml", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() )); assertTrue(stderr.contains("The following option is required: [--element]"), "Unexpected stderr: " + stderr); @@ -32,10 +32,10 @@ void withElement() { "import-aggregate-xml-files", "--path", "src/test/resources/xml-file/people.xml", "--element", "person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "withElement-test", - "--uriReplace", ".*/xml-file,''" + "--uri-replace", ".*/xml-file,''" ); assertCollectionSize("withElement-test", 3); @@ -51,10 +51,10 @@ void withElementAndNamespace() { "--path", "src/test/resources/xml-file/people-with-namespace.xml", "--element", "person", "--namespace", "org:example", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "withElementAndNamespace-test", - "--uriReplace", ".*/xml-file,''" + "--uri-replace", ".*/xml-file,''" ); assertCollectionSize("withElementAndNamespace-test", 3); @@ -70,9 +70,9 @@ void withAllOptions() { "--path", "src/test/resources/xml-file/people-with-namespace.xml", "--element", "person", "--namespace", "org:example", - "--uriElement", "name", - "--uriNamespace", "org:example", - "--connectionString", makeConnectionString(), + "--uri-element", "name", + "--uri-namespace", "org:example", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "withAllOptions-test" ); @@ -88,12 +88,12 @@ void importZippedXml() { run( "import-aggregate-xml-files", "--path", "src/test/resources/xml-file/single-xml.zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "importZippedXml-test", "--compression", "zip", "--element", "", - "--uriReplace", ".*/single-xml.zip,''" + "--uri-replace", ".*/single-xml.zip,''" ); assertCollectionSize("importZippedXml-test", 1); } @@ -104,10 +104,10 @@ void importZippedXmlWithElement() { "import-aggregate-xml-files", "--path", "src/test/resources/xml-file/single-xml.zip", "--element", "person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "importZippedXmlWithElement-test", - "--uriElement", "name", + "--uri-element", "name", "--compression", "zip" ); assertCollectionSize("importZippedXmlWithElement-test", 3); @@ -122,11 +122,11 @@ void importMultipleZippedXml() throws FileNotFoundException { "import-aggregate-xml-files", "--path", "src/test/resources/xml-file/multiple-xmls.zip", "--element", "", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "importMultipleZippedXml-test", "--compression", "zip", - "--uriReplace", ".*/temp,''" + "--uri-replace", ".*/temp,''" ); assertCollectionSize("importMultipleZippedXml-test", 3); verifyDocContents("/hello.xml", "src/test/resources/xml-file/temp/hello.xml"); @@ -140,11 +140,11 @@ void importGzippedXml() { "import-aggregate-xml-files", "--path", 
"src/test/resources/xml-file/people.xml.gz", "--element", "person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "importGzippedXml-test", "--compression", "gzip", - "--uriElement", "name" + "--uri-element", "name" ); assertCollectionSize("importGzippedXml-test", 3); @@ -160,7 +160,7 @@ void dontAbortOnReadFailureByDefault() { "--path", "src/test/resources/parquet/individual/cars.parquet", "--path", "src/test/resources/xml-file/people.xml", "--element", "person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "agg-xml" )); @@ -175,15 +175,15 @@ void abortOnReadFailure() { String stderr = runAndReturnStderr(() -> run( "import-aggregate-xml-files", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--abortOnReadFailure", + "--abort-on-read-failure", "--element", "person", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "agg-xml" )); assertTrue(stderr.contains("Command failed, cause: Unable to read XML from file"), - "With --abortOnReadFailure included, the command should fail if it cannot read a file; stderr: " + stderr); + "With --abort-on-read-failure included, the command should fail if it cannot read a file; stderr: " + stderr); assertCollectionSize("agg-xml", 0); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportArchiveFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportArchiveFilesTest.java index ff29287e..3f96393f 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportArchiveFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportArchiveFilesTest.java @@ -17,8 +17,8 @@ void allMetadata() { run( "import-archive-files", "--path", "src/test/resources/archive-files", - "--uriReplace", ".*archive.zip,''", - "--connectionString", makeConnectionString() + "--uri-replace", ".*archive.zip,''", + "--connection-string", makeConnectionString() ); for (String uri : getUrisInCollection("collection1", 2)) { @@ -43,8 +43,8 @@ void subsetOfMetadata() { "import-archive-files", "--path", "src/test/resources/archive-files", "--categories", "collections,permissions", - "--uriReplace", ".*archive.zip,''", - "--connectionString", makeConnectionString() + "--uri-replace", ".*archive.zip,''", + "--connection-string", makeConnectionString() ); for (String uri : getUrisInCollection("collection1", 2)) { @@ -67,7 +67,7 @@ void dontAbortOnReadFailureByDefault() { "import-archive-files", "--path", "src/test/resources/archive-files", "--path", "src/test/resources/mlcp-archives", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() )); assertFalse(stderr.contains("Command failed"), @@ -82,8 +82,8 @@ void abortOnReadFailure() { "import-archive-files", "--path", "src/test/resources/archive-files", "--path", "src/test/resources/mlcp-archives", - "--abortOnReadFailure", - "--connectionString", makeConnectionString() + "--abort-on-read-failure", + "--connection-string", makeConnectionString() )); assertTrue( diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAvroFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAvroFilesTest.java index 
1f22a994..478a45e1 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAvroFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportAvroFilesTest.java @@ -14,10 +14,10 @@ void defaultSettingsMultipleFiles() { run( "import-avro-files", "--path", "src/test/resources/avro/*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-test", - "--uriTemplate", "/avro/{color}.json" + "--uri-template", "/avro/{color}.json" ); assertCollectionSize("avro-test", 6); @@ -34,10 +34,10 @@ void jsonRootName() { run( "import-avro-files", "--path", "src/test/resources/avro/*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--jsonRootName", "myAvroData", - "--uriTemplate", "/avro/{/myAvroData/color}.json" + "--json-root-name", "myAvroData", + "--uri-template", "/avro/{/myAvroData/color}.json" ); JsonNode doc = readJsonDocument("/avro/blue.json"); @@ -49,11 +49,11 @@ void ignoreExtension() { run( "import-avro-files", "--path", "src/test/resources/avro/*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-test", "-PignoreExtension=false", - "--uriTemplate", "/avro/{color}.json" + "--uri-template", "/avro/{color}.json" ); assertCollectionSize("avro-test", 3); @@ -68,7 +68,7 @@ void badConfigurationItem() { run( "import-avro-files", "--path", "src/test/resources/avro/*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "-Cspark.sql.parquet.filterPushdown=invalid-value" ) @@ -85,7 +85,7 @@ void dontAbortOnReadFailure() { "import-avro-files", "--path", "src/test/resources/avro/colors.avro", "--path", "src/test/resources/json-files/array-of-objects.json", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-data" )); @@ -100,8 +100,8 @@ void abortOnReadFailure() { String stderr = runAndReturnStderr(() -> run( "import-avro-files", "--path", "src/test/resources/json-files/array-of-objects.json", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS )); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportDelimitedFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportDelimitedFilesTest.java index d8ae8171..529995ac 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportDelimitedFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportDelimitedFilesTest.java @@ -18,10 +18,10 @@ void defaultSettings() { run( "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-test", - "--uriTemplate", "/delimited/{number}.json" + "--uri-template", "/delimited/{number}.json" ); assertCollectionSize("delimited-test", 3); @@ -35,10 +35,10 @@ void jsonRootName() { run( "import-delimited-files", "--path", 
"src/test/resources/delimited-files/three-rows.csv", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--jsonRootName", "myData", - "--uriTemplate", "/delimited/{/myData/number}.json" + "--json-root-name", "myData", + "--uri-template", "/delimited/{/myData/number}.json" ); JsonNode doc = readJsonDocument("/delimited/1.json"); @@ -50,10 +50,10 @@ void gzip() { run( "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv.gz", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-test", - "--uriTemplate", "/delimited/{number}.json" + "--uri-template", "/delimited/{number}.json" ); assertCollectionSize("delimited-test", 3); @@ -68,10 +68,10 @@ void customDelimiter() { "import-delimited-files", "--path", "src/test/resources/delimited-files/semicolon-delimiter.csv", "-Psep=;", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-test", - "--uriTemplate", "/delimited/{number}.json" + "--uri-template", "/delimited/{number}.json" ); assertCollectionSize("delimited-test", 3); @@ -86,10 +86,10 @@ void noHeader() { "import-delimited-files", "--path", "src/test/resources/delimited-files/no-header.csv", "-Pheader=false", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "no-header", - "--uriTemplate", "/no-header/{_c0}.json" + "--uri-template", "/no-header/{_c0}.json" ); assertCollectionSize("no-header", 2); @@ -108,10 +108,10 @@ void dontInferSchema() { "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv", "-PinferSchema=false", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "no-schema-inference", - "--uriTemplate", "/delimited/{number}.json" + "--uri-template", "/delimited/{number}.json" ); assertCollectionSize("no-schema-inference", 3); @@ -136,7 +136,7 @@ void limitAndPreview() { "--path", "src/test/resources/delimited-files/three-rows.csv", "--limit", "1", "--preview", "3", - "--previewVertical" + "--preview-vertical" ); }); @@ -151,7 +151,7 @@ void dontAbortOnReadFailure() { "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv", "--path", "src/test/resources/xml-file/single-xml.zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-test" )); @@ -184,15 +184,15 @@ void abortOnReadFailure() { "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv", "--path", "src/test/resources/xml-file/single-xml.zip", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-test" )); assertCollectionSize("delimited-test", 0); assertTrue(stderr.contains("Command failed, cause: [MALFORMED_RECORD_IN_PARSING]"), "The command should " + - "have failed due to --abortOnReadFailure being included. This should result in the 'mode' option being " + + "have failed due to --abort-on-read-failure being included. 
This should result in the 'mode' option being " + "set to FAILFAST."); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesOptionsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesOptionsTest.java index 1377953d..b777c804 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesOptionsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesOptionsTest.java @@ -16,21 +16,21 @@ void test() { "--password", "someword", "--path", "src/test/resources/mixed-files/hello*", "--partitions", "6", - "--documentType", "XML", - "--abortOnWriteFailure", - "--batchSize", "50", + "--document-type", "XML", + "--abort-on-write-failure", + "--batch-size", "50", "--collections", "collection1", - "--failedDocumentsPath", "/my/failures", + "--failed-documents-path", "/my/failures", "--permissions", "role1,read,role2,update", - "--temporalCollection", "temporal1", - "--threadCount", "17", + "--temporal-collection", "temporal1", + "--thread-count", "17", "--transform", "transform1", - "--transformParams", "param1;value1", - "--transformParamsDelimiter", ";", - "--uriPrefix", "/prefix", - "--uriReplace", ".*value,''", - "--uriSuffix", ".suffix", - "--uriTemplate", "/test/{value}.json" + "--transform-params", "param1;value1", + "--transform-params-delimiter", ";", + "--uri-prefix", "/prefix", + "--uri-replace", ".*value,''", + "--uri-suffix", ".suffix", + "--uri-template", "/test/{value}.json" ); assertOptions(command.getConnectionParams().makeOptions(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesTest.java index c483c234..16899370 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFilesTest.java @@ -23,10 +23,10 @@ void test() { run( "import-files", "--path", "src/test/resources/mixed-files/hello*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", - "--uriReplace", ".*/mixed-files,''" + "--uri-replace", ".*/mixed-files,''" ); verifyDocsWereWritten(uris.length, uris); @@ -37,12 +37,12 @@ void documentType() { run( "import-files", "--path", "src/test/resources/mixed-files/hello.xml", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", - "--uriReplace", ".*/mixed-files,''", - "--uriSuffix", ".unknown", - "--documentType", "xml" + "--uri-replace", ".*/mixed-files,''", + "--uri-suffix", ".unknown", + "--document-type", "xml" ); String kind = getDatabaseClient().newServerEval() @@ -61,8 +61,8 @@ void preview() { "import-files", "--path", "src/test/resources/mixed-files", "--preview", "2", - "--previewDrop", "content", "modificationTime", - "--previewVertical" + "--preview-drop", "content", "modificationTime", + "--preview-vertical" )); String message = "Unexpected output to stdout: " + stdout; @@ -83,9 +83,9 @@ void fileOptions(@TempDir Path tempDir) throws IOException { File optionsFile = new File(tempDir.toFile(), "options.txt"); String options = "--path\n" + "src/test/resources/mixed-files/hello*\n" + - "--connectionString\n" + + "--connection-string\n" + makeConnectionString() + "\n" + - "--uriReplace\n" 
+ + "--uri-replace\n" + ".*/mixed-files,''"; FileCopyUtils.copy(options.getBytes(), optionsFile); @@ -104,10 +104,10 @@ void zipTest() { run( "import-files", "--path", "src/test/resources/mixed-files/goodbye.zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", - "--uriReplace", ".*/mixed-files,''", + "--uri-replace", ".*/mixed-files,''", "--compression", "zip" ); @@ -119,10 +119,10 @@ void zipCaseSensitivityTest() { run( "import-files", "--path", "src/test/resources/mixed-files/goodbye.zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", - "--uriReplace", ".*/mixed-files,''", + "--uri-replace", ".*/mixed-files,''", "--compression", "ZIp" ); @@ -134,10 +134,10 @@ void gzipTest() { run( "import-files", "--path", "src/test/resources/mixed-files/hello2.txt.gz", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "files", "--permissions", DEFAULT_PERMISSIONS, - "--uriReplace", ".*/mixed-files,''", + "--uri-replace", ".*/mixed-files,''", "--compression", "gzip" ); @@ -149,10 +149,10 @@ void fileOptionsFilter() { run( "import-files", "--path", "src/test/resources/mixed-files/hello*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", - "--uriReplace", ".*/mixed-files,''", + "--uri-replace", ".*/mixed-files,''", "--filter", "*.json" ); @@ -164,11 +164,11 @@ void fileOptionsRecursiveFileLookupDefault() { run( "import-files", "--path", "src/test/resources/mixed-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", "--filter", "*.json", - "--uriReplace", ".*/mixed-files,''" + "--uri-replace", ".*/mixed-files,''" ); assertCollectionSize("files", 2); @@ -179,12 +179,12 @@ void fileOptionsRecursiveFileLookupFalse() { run( "import-files", "--path", "src/test/resources/mixed-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "files", "--filter", "*.json", - "--recursiveFileLookup", "false", - "--uriReplace", ".*/mixed-files,''" + "--recursive-file-lookup", "false", + "--uri-replace", ".*/mixed-files,''" ); assertCollectionSize("files", 1); @@ -196,10 +196,10 @@ void invalidGzippedFile() { "import-files", "--path", "src/test/resources/json-files/array-of-objects.json", "--path", "src/test/resources/mixed-files/hello2.txt.gz", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--collections", "files", "--permissions", DEFAULT_PERMISSIONS, - "--uriReplace", ".*/mixed-files,''", + "--uri-replace", ".*/mixed-files,''", "--compression", "gzip" ); @@ -213,14 +213,14 @@ void abortOnReadFailure() { String stderr = runAndReturnStderr(() -> run( "import-files", "--path", "src/test/resources/json-files/array-of-objects.json", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--collections", "files", "--permissions", DEFAULT_PERMISSIONS, "--compression", "gzip" )); - assertTrue(stderr.contains("Command failed, cause: Unable to read file at"), "With 
--abortReadOnFailure, " + + assertTrue(stderr.contains("Command failed, cause: Unable to read file at"), "With --abort-on-read-failure, " + "the command should fail when it encounters an invalid gzipped file."); assertCollectionSize("files", 0); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFromS3Test.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFromS3Test.java index d5fb3fb3..87ae4fc2 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFromS3Test.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportFromS3Test.java @@ -22,8 +22,8 @@ void test() { "import-files", "--path", path, "--preview", "10", - "--previewDrop", "content", "modificationTime", - "--s3AddCredentials" + "--preview-drop", "content", "modificationTime", + "--s3-add-credentials" )); assertNotNull(stdout); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcTest.java index 7c216a48..752ae64c 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcTest.java @@ -15,14 +15,14 @@ class ImportJdbcTest extends AbstractTest { void tenCustomers() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL, - "--jdbcUser", PostgresUtil.USER, - "--jdbcPassword", PostgresUtil.PASSWORD, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL, + "--jdbc-user", PostgresUtil.USER, + "--jdbc-password", PostgresUtil.PASSWORD, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", "select * from customer where customer_id < 11", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/customer/{customer_id}.json", + "--uri-template", "/customer/{customer_id}.json", "--collections", "customer" ); @@ -33,15 +33,15 @@ void tenCustomers() { void jsonRootName() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL, - "--jdbcUser", PostgresUtil.USER, - "--jdbcPassword", PostgresUtil.PASSWORD, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL, + "--jdbc-user", PostgresUtil.USER, + "--jdbc-password", PostgresUtil.PASSWORD, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", "select * from customer where customer_id < 11", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--jsonRootName", "customer", - "--uriTemplate", "/customer/{/customer/customer_id}.json", + "--json-root-name", "customer", + "--uri-template", "/customer/{/customer/customer_id}.json", "--collections", "customer" ); @@ -53,12 +53,12 @@ void jsonRootName() { void tenCustomersWithUserAndPasswordInUrl() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", "select * from customer where customer_id < 11", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/customer/{customer_id}.json", + "--uri-template", "/customer/{customer_id}.json", "--collections", "customer" ); @@ -69,15 +69,15 @@ void tenCustomersWithUserAndPasswordInUrl() { void allCustomers() { run(
"import-jdbc", - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, "--query", "select * from customer", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "customer", "--repartition", "2", // Just verifying that these work without causing any errors. - "--totalThreadCount", "16", - "--batchSize", "10" + "--total-thread-count", "16", + "--batch-size", "10" ); assertCollectionSize("customer", 599); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcWithAggregatesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcWithAggregatesTest.java index 8c88fc14..e4907b0c 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcWithAggregatesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJdbcWithAggregatesTest.java @@ -24,14 +24,14 @@ void customerWithArrayOfRentals() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", query, - "--groupBy", "customer_id", + "--group-by", "customer_id", "--aggregate", "payments=payment_id;amount;payment_date", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/customer/{customer_id}.json" + "--uri-template", "/customer/{customer_id}.json" ); JsonNode doc = readJsonDocument("/customer/1.json"); @@ -73,15 +73,15 @@ void customerWithArrayOfRentalsAndArrayOfPayments() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", query, - "--groupBy", "customer_id", + "--group-by", "customer_id", "--aggregate", "payments=payment_id;amount", "--aggregate", "rentals=rental_id;inventory_id", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/customer/{customer_id}.json" + "--uri-template", "/customer/{customer_id}.json" ); JsonNode doc = readJsonDocument("/customer/1.json"); @@ -118,14 +118,14 @@ void joinThatProducesArrayWithAtomicValues() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL_WITH_AUTH, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL_WITH_AUTH, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", query, - "--groupBy", "film_id", + "--group-by", "film_id", "--aggregate", "actor_ids=actor_id", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/film/{film_id}.json" + "--uri-template", "/film/{film_id}.json" ); JsonNode film = readJsonDocument("/film/1.json"); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesTest.java index 0c278751..5304423a 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportJsonFilesTest.java @@ -17,10 +17,10 @@ void objectFilesAndArrayOfObjectsFile() { run( "import-json-files", "--path", 
"src/test/resources/json-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "json-objects", - "--uriTemplate", "/json-object/{number}.json", + "--uri-template", "/json-object/{number}.json", "--filter", "*.json" ); @@ -48,11 +48,11 @@ void jsonLines() { run( "import-json-files", "--path", "src/test/resources/delimited-files/line-delimited-json.txt", - "--jsonLines", - "--connectionString", makeConnectionString(), + "--json-lines", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-json-test", - "--uriTemplate", "/delimited/{lastName}.json" + "--uri-template", "/delimited/{lastName}.json" ); assertCollectionSize("delimited-json-test", 3); @@ -66,12 +66,12 @@ void jsonRootName() { run( "import-json-files", "--path", "src/test/resources/delimited-files/line-delimited-json.txt", - "--jsonLines", - "--connectionString", makeConnectionString(), + "--json-lines", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-json-test", - "--jsonRootName", "myData", - "--uriTemplate", "/delimited/{/myData/lastName}.json" + "--json-root-name", "myData", + "--uri-template", "/delimited/{/myData/lastName}.json" ); JsonNode doc = readJsonDocument("/delimited/lastName-1.json"); @@ -83,12 +83,12 @@ void jsonLinesWithCustomDelimiter() { run( "import-json-files", "--path", "src/test/resources/delimited-files/custom-delimiter-json.txt", - "--jsonLines", + "--json-lines", "-PlineSep=:\n", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "custom-delimited-test", - "--uriTemplate", "/custom/delimited/{firstName}.json" + "--uri-template", "/custom/delimited/{firstName}.json" ); assertCollectionSize("custom-delimited-test", 3); @@ -105,7 +105,7 @@ void jsonLinesWithCustomDelimiter() { *

* So for zip files, the best we can do is use our own reader, which is limited to reading each file as a "file row" * and then writing it as a document to MarkLogic. Which means that a user cannot use a feature like - * "--uriTemplate", as that depends on having values in columns that can be referenced by the template. We will + * "--uri-template", as that depends on having values in columns that can be referenced by the template. We will * hopefully be enhancing this in a future story - specifically, by enhancing the URI template feature to work on * file rows and document rows. */ @@ -115,10 +115,10 @@ void zipOfJsonObjectFiles() { "import-files", "--path", "src/test/resources/json-files/object-files/objects.zip", "--compression", "zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "zipped-objects", - "--uriReplace", ".*object-files,''" + "--uri-replace", ".*object-files,''" ); JsonNode doc = readJsonDocument("/objects.zip/object3.json"); @@ -133,8 +133,8 @@ void dontAbortOnReadFailure() { "import-json-files", "--path", "src/test/resources/delimited-files/line-delimited-json.txt", "--path", "src/test/resources/xml-file/single-xml.zip", - "--jsonLines", - "--connectionString", makeConnectionString(), + "--json-lines", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-json-test" )); @@ -155,16 +155,16 @@ void abortOnReadFailure() { "import-json-files", "--path", "src/test/resources/delimited-files/line-delimited-json.txt", "--path", "src/test/resources/xml-file/single-xml.zip", - "--jsonLines", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--json-lines", + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-json-test" )); assertCollectionSize("delimited-json-test", 0); assertTrue(stderr.contains("Command failed, cause: Invalid UTF-8 start"), "The command should have failed " + - "due to the invalid single-xml.zip file being included along with --abortOnReadFailure being " + + "due to the invalid single-xml.zip file being included along with --abort-on-read-failure being " + "included as well; actual stderr: " + stderr); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportMlcpArchiveFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportMlcpArchiveFilesTest.java index c00c6b90..c6951378 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportMlcpArchiveFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportMlcpArchiveFilesTest.java @@ -18,7 +18,7 @@ void allMetadata() { run( "import-mlcp-archive-files", "--path", "src/test/resources/mlcp-archives", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() ); for (String uri : getUrisInCollection("collection1", 2)) { @@ -43,7 +43,7 @@ void subsetOfMetadata() { "import-mlcp-archive-files", "--path", "src/test/resources/mlcp-archives", "--categories", "collections,permissions", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() ); for (String uri : getUrisInCollection("collection1", 2)) { @@ -66,7 +66,7 @@ void invalidFileDontAbort() { "import-mlcp-archive-files", "--path", "src/test/resources/mlcp-archives", "--path", 
"src/test/resources/mixed-files/goodbye.zip", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() ); assertCollectionSize("The error from the non-MLCP-archive file goodbye.zip should have been logged " + @@ -79,8 +79,8 @@ void invalidFileAbort() { String stderr = runAndReturnStderr(() -> run( "import-mlcp-archive-files", "--path", "src/test/resources/mixed-files/goodbye.zip", - "--abortOnReadFailure", - "--connectionString", makeConnectionString() + "--abort-on-read-failure", + "--connection-string", makeConnectionString() )); assertTrue(stderr.contains("Command failed, cause: Unable to read metadata for entry: goodbye.json"), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportOrcFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportOrcFilesTest.java index 2ff24edf..fa4b1f4e 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportOrcFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportOrcFilesTest.java @@ -14,10 +14,10 @@ void orcFileTest() { run( "import-orc-files", "--path", "src/test/resources/orc-files/authors.orc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "orcFile-test", - "--uriTemplate", "/orc-test/{LastName}.json" + "--uri-template", "/orc-test/{LastName}.json" ); getUrisInCollection("orcFile-test", 15).forEach(this::verifyDocContent); @@ -28,11 +28,11 @@ void jsonRootName() { run( "import-orc-files", "--path", "src/test/resources/orc-files/authors.orc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriPrefix", "/orc-test", - "--jsonRootName", "myOrcData", - "--uriTemplate", "/orc/{/myOrcData/LastName}.json" + "--uri-prefix", "/orc-test", + "--json-root-name", "myOrcData", + "--uri-template", "/orc/{/myOrcData/LastName}.json" ); JsonNode doc = readJsonDocument("/orc/Humbee.json"); @@ -44,10 +44,10 @@ void orcFileWithCompressionTest() { run( "import-orc-files", "--path", "src/test/resources/orc-files/authors.orc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "compression-test", - "--uriTemplate", "/orc-compressed-test{LastName}.json", + "--uri-template", "/orc-compressed-test{LastName}.json", "-Pcompression=snappy" ); @@ -60,7 +60,7 @@ void badConfigurationItem() { run( "import-orc-files", "--path", "src/test/resources/orc-files", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "-Cspark.sql.parquet.filterPushdown=invalid-value" ) @@ -77,7 +77,7 @@ void dontAbortOnReadFailure() { "import-orc-files", "--path", "src/test/resources/orc-files/authors.orc", "--path", "src/test/resources/avro/colors.avro", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "orc-data" )); @@ -92,8 +92,8 @@ void abortOnReadFailure() { String stderr = runAndReturnStderr(() -> run( "import-parquet-files", "--path", "src/test/resources/avro/colors.avro", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS )); 
diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportParquetFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportParquetFilesTest.java index f76dc4cc..fe3652aa 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportParquetFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportParquetFilesTest.java @@ -14,10 +14,10 @@ void defaultSettingsSingleFile() { run( "import-parquet-files", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test", - "--uriTemplate", "/parquet/{model}.json" + "--uri-template", "/parquet/{model}.json" ); assertCollectionSize("parquet-test", 32); @@ -32,7 +32,7 @@ void count() { "import-parquet-files", "--count", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test" )); @@ -48,11 +48,11 @@ void jsonRootName() { run( "import-parquet-files", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test", - "--jsonRootName", "car", - "--uriTemplate", "/parquet/{/car/model}.json" + "--json-root-name", "car", + "--uri-template", "/parquet/{/car/model}.json" ); JsonNode doc = readJsonDocument("/parquet/Toyota Corolla.json"); @@ -64,11 +64,11 @@ void defaultSettingsMultipleFileDifferentSchema_mergeTrue() { run( "import-parquet-files", "--path", "src/test/resources/parquet/related/*.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test", "-PmergeSchema=true", - "--uriTemplate", "/parquet/{color}.json" + "--uri-template", "/parquet/{color}.json" ); assertCollectionSize("parquet-test", 6); @@ -92,7 +92,7 @@ void invalidParquetFile() { run("import-parquet-files", "--path", "src/test/resources/parquet/individual/invalid.parquet", "--preview", "10", - "--abortOnReadFailure" + "--abort-on-read-failure" ) ); @@ -110,7 +110,7 @@ void badConfigurationItem() { run( "import-parquet-files", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "-Cspark.sql.parquet.filterPushdown=invalid-value" ) @@ -130,7 +130,7 @@ void dontAbortOnReadFailure() { // Without mergeSchema=true, Spark will throw an error of "Unable to infer schema for Parquet". This seems // to occur if there's at least one bad file. With mergeSchema=true, "-PmergeSchema=true", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-cars" )); @@ -146,11 +146,11 @@ void abortOnReadFailure() { "import-parquet-files", "--path", "src/test/resources/parquet/individual/cars.parquet", "--path", "src/test/resources/avro/colors.avro", - "--abortOnReadFailure", + "--abort-on-read-failure", // This is kept here to ensure the command fails because it could read the Avro file and not because // Spark could not infer a schema. 
"-PmergeSchema=true", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS )); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesTest.java index 2d47ce2e..a88d8671 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRdfFilesTest.java @@ -14,7 +14,7 @@ void noGraph() { run( "import-rdf-files", "--path", "src/test/resources/rdf/englishlocale.ttl", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "my-triples" ); @@ -34,7 +34,7 @@ void withGraph() { run( "import-rdf-files", "--path", "src/test/resources/rdf/englishlocale.ttl", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--graph", "my-graph" ); @@ -48,14 +48,14 @@ void withGraphOverride() { run( "import-rdf-files", "--path", "src/test/resources/rdf/three-quads.trig", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--graphOverride", "my-other-graph" + "--graph-override", "my-other-graph" ); assertCollectionSize( "All the quads in three-quads.trig should be added to the same managed triples document; their graphs " + - "should be ignored in favor of the --graphOverride value.", "my-other-graph", 2 + "should be ignored in favor of the --graph-override value.", "my-other-graph", 2 ); // Make sure nothing got written to any of the other possible graphs. 
@@ -70,7 +70,7 @@ void gzippedFile() { run( "import-rdf-files", "--path", "src/test/resources/rdf/englishlocale2.ttl.gz", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--compression", "gzip" ); @@ -86,7 +86,7 @@ void zipContainingEachFileType() { run( "import-rdf-files", "--path", "src/test/resources/rdf/each-rdf-file-type.zip", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "all-my-rdf", "--compression", "zip" @@ -112,7 +112,7 @@ void invalidFileDontAbort() { "import-rdf-files", "--path", "src/test/resources/mixed-files/hello2.txt.gz", "--path", "src/test/resources/rdf/englishlocale.ttl", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "my-triples" ); @@ -126,8 +126,8 @@ void invalidFileAbort() { String stderr = runAndReturnStderr(() -> run( "import-rdf-files", "--path", "src/test/resources/mixed-files/hello2.txt.gz", - "--abortOnReadFailure", - "--connectionString", makeConnectionString(), + "--abort-on-read-failure", + "--connection-string", makeConnectionString(), "--collections", "my-triples" )); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRowsAsXmlTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRowsAsXmlTest.java index 8aa40a5a..298d4fe5 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRowsAsXmlTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/importdata/ImportRowsAsXmlTest.java @@ -16,12 +16,12 @@ void delimitedText() { run( "import-delimited-files", "--path", "src/test/resources/delimited-files/three-rows.csv", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "delimited-xml", - "--uriTemplate", "/delimited/{number}.xml", - "--xmlRootName", "myDelimitedText", - "--xmlNamespace", "csv.org" + "--uri-template", "/delimited/{number}.xml", + "--xml-root-name", "myDelimitedText", + "--xml-namespace", "csv.org" ); assertCollectionSize("delimited-xml", 3); @@ -34,17 +34,17 @@ void delimitedText() { void jdbc() { run( "import-jdbc", - "--jdbcUrl", PostgresUtil.URL, - "--jdbcUser", PostgresUtil.USER, - "--jdbcPassword", PostgresUtil.PASSWORD, - "--jdbcDriver", PostgresUtil.DRIVER, + "--jdbc-url", PostgresUtil.URL, + "--jdbc-user", PostgresUtil.USER, + "--jdbc-password", PostgresUtil.PASSWORD, + "--jdbc-driver", PostgresUtil.DRIVER, "--query", "select * from customer where customer_id < 11", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, - "--uriTemplate", "/customer/{customer_id}.xml", + "--uri-template", "/customer/{customer_id}.xml", "--collections", "jdbc-customer", - "--xmlRootName", "CUSTOMER", - "--xmlNamespace", "org:example" + "--xml-root-name", "CUSTOMER", + "--xml-namespace", "org:example" ); assertCollectionSize("jdbc-customer", 10); @@ -58,12 +58,12 @@ void parquet() { run( "import-parquet-files", "--path", "src/test/resources/parquet/individual/cars.parquet", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "parquet-test", - "--xmlRootName", "myParquet", - 
"--xmlNamespace", "parquet.org", - "--uriTemplate", "/parquet/{model}.xml" + "--xml-root-name", "myParquet", + "--xml-namespace", "parquet.org", + "--uri-template", "/parquet/{model}.xml" ); assertCollectionSize("parquet-test", 32); @@ -77,12 +77,12 @@ void avro() { run( "import-avro-files", "--path", "src/test/resources/avro/*", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "avro-test", - "--uriTemplate", "/avro/{color}.xml", - "--xmlRootName", "myAvro", - "--xmlNamespace", "avro.org" + "--uri-template", "/avro/{color}.xml", + "--xml-root-name", "myAvro", + "--xml-namespace", "avro.org" ); assertCollectionSize("avro-test", 6); @@ -96,12 +96,12 @@ void orc() { run( "import-orc-files", "--path", "src/test/resources/orc-files/authors.orc", - "--connectionString", makeConnectionString(), + "--connection-string", makeConnectionString(), "--permissions", DEFAULT_PERMISSIONS, "--collections", "orc-test", - "--uriTemplate", "/orc/{ForeName}", - "--xmlRootName", "myOrc", - "--xmlNamespace", "orc.org" + "--uri-template", "/orc/{ForeName}", + "--xml-root-name", "myOrc", + "--xml-namespace", "orc.org" ); assertCollectionSize("orc-test", 15); diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessOptionsTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessOptionsTest.java index be0a3ec2..f160de58 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessOptionsTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessOptionsTest.java @@ -13,12 +13,12 @@ class ReprocessOptionsTest extends AbstractOptionsTest { @Test void readInvoke() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readInvoke", "/my/invoke.sjs", - "--readPartitionsInvoke", "/my/other-invoke.sjs", - "--readVar", "param1=value1", - "--readVar", "param2=spaces work!", - "--writeInvoke", "/my/invoke.sjs" + "--connection-string", "user:password@host:8000", + "--read-invoke", "/my/invoke.sjs", + "--read-partitions-invoke", "/my/other-invoke.sjs", + "--read-var", "param1=value1", + "--read-var", "param2=spaces work!", + "--write-invoke", "/my/invoke.sjs" ); assertOptions(command.readParams.get(), @@ -32,15 +32,15 @@ void readInvoke() { @Test void writeInvoke() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readInvoke", "/my/invoke.sjs", - "--writeInvoke", "/my/invoke.sjs", - "--externalVariableName", "MY_VAR", - "--externalVariableDelimiter", ";", - "--abortOnWriteFailure", - "--batchSize", "123", - "--writeVar", "param1=value1", - "--writeVar", "param2=spaces work!" + "--connection-string", "user:password@host:8000", + "--read-invoke", "/my/invoke.sjs", + "--write-invoke", "/my/invoke.sjs", + "--external-variable-name", "MY_VAR", + "--external-variable-delimiter", ";", + "--abort-on-write-failure", + "--batch-size", "123", + "--write-var", "param1=value1", + "--write-var", "param2=spaces work!" 
); assertOptions(command.writeParams.get(), @@ -57,10 +57,10 @@ void writeInvoke() { @Test void readJavascript() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readJavascript", "fn.currentDate()", - "--readPartitionsJavascript", "console.log('')", - "--writeJavascript", "fn.currentDate()" + "--connection-string", "user:password@host:8000", + "--read-javascript", "fn.currentDate()", + "--read-partitions-javascript", "console.log('')", + "--write-javascript", "fn.currentDate()" ); assertOptions(command.readParams.get(), @@ -72,9 +72,9 @@ void readJavascript() { @Test void writeJavascript() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readJavascript", "fn.currentDate()", - "--writeJavascript", "fn.currentDate()" + "--connection-string", "user:password@host:8000", + "--read-javascript", "fn.currentDate()", + "--write-javascript", "fn.currentDate()" ); assertOptions(command.writeParams.get(), @@ -85,10 +85,10 @@ void writeJavascript() { @Test void readXquery() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readXquery", "fn:current-date()", - "--readPartitionsXquery", "xdmp:log('')", - "--writeXquery", "fn:current-date()" + "--connection-string", "user:password@host:8000", + "--read-xquery", "fn:current-date()", + "--read-partitions-xquery", "xdmp:log('')", + "--write-xquery", "fn:current-date()" ); assertOptions(command.readParams.get(), @@ -100,9 +100,9 @@ void readXquery() { @Test void writeXquery() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readXquery", "fn:current-date()", - "--writeXquery", "fn:current-date()" + "--connection-string", "user:password@host:8000", + "--read-xquery", "fn:current-date()", + "--write-xquery", "fn:current-date()" ); assertOptions(command.writeParams.get(), @@ -113,10 +113,10 @@ void writeXquery() { @Test void readJavascriptFile() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readJavascriptFile", "my-code.js", - "--readPartitionsJavascriptFile", "path/my-partitions.js", - "--writeJavascript", "fn.currentDate()" + "--connection-string", "user:password@host:8000", + "--read-javascript-file", "my-code.js", + "--read-partitions-javascript-file", "path/my-partitions.js", + "--write-javascript", "fn.currentDate()" ); assertOptions(command.readParams.get(), @@ -128,10 +128,10 @@ void readJavascriptFile() { @Test void readXqueryFile() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readXqueryFile", "my-code.xqy", - "--readPartitionsXqueryFile", "path/my-partitions.xqy", - "--writeJavascript", "fn.currentDate()" + "--connection-string", "user:password@host:8000", + "--read-xquery-file", "my-code.xqy", + "--read-partitions-xquery-file", "path/my-partitions.xqy", + "--write-javascript", "fn.currentDate()" ); assertOptions(command.readParams.get(), @@ -143,9 +143,9 @@ void readXqueryFile() { @Test void writeJavascriptFile() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readJavascript", "doesn't matter", - "--writeJavascriptFile", "my-code.js" + "--connection-string", "user:password@host:8000", + 
"--read-javascript", "doesn't matter", + "--write-javascript-file", "my-code.js" ); assertOptions(command.writeParams.get(), @@ -156,9 +156,9 @@ void writeJavascriptFile() { @Test void writeXqueryFile() { ReprocessCommand command = (ReprocessCommand) getCommand("reprocess", - "--connectionString", "user:password@host:8000", - "--readJavascript", "doesn't matter", - "--writeXqueryFile", "my-code.xqy" + "--connection-string", "user:password@host:8000", + "--read-javascript", "doesn't matter", + "--write-xquery-file", "my-code.xqy" ); assertOptions(command.writeParams.get(), diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessTest.java b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessTest.java index a172d925..72258f38 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessTest.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/impl/reprocess/ReprocessTest.java @@ -17,11 +17,11 @@ class ReprocessTest extends AbstractTest { void test() { run( "reprocess", - "--connectionString", makeConnectionString(), - "--readJavascript", "var collection; cts.uris(null, null, cts.collectionQuery(collection))", - "--readVar", "collection=author", - "--writeInvoke", "/writeDocument.sjs", - "--writeVar", "theValue=my value" + "--connection-string", makeConnectionString(), + "--read-javascript", "var collection; cts.uris(null, null, cts.collectionQuery(collection))", + "--read-var", "collection=author", + "--write-invoke", "/writeDocument.sjs", + "--write-var", "theValue=my value" ); // reprocess-test is the collection used by writeDocument.sjs. @@ -38,8 +38,8 @@ void test() { void previewDoesntRequireWriteParam() { String stdout = runAndReturnStdout(() -> run( "reprocess", - "--connectionString", makeConnectionString(), - "--readJavascript", "cts.uris(null, null, cts.collectionQuery('author'))", + "--connection-string", makeConnectionString(), + "--read-javascript", "cts.uris(null, null, cts.collectionQuery('author'))", "--preview", "2" )); @@ -52,11 +52,11 @@ void previewDoesntRequireWriteParam() { void missingReadParam() { String stderr = runAndReturnStderr(() -> run( "reprocess", - "--connectionString", makeConnectionString() + "--connection-string", makeConnectionString() )); assertTrue( - stderr.contains("Must specify one of --readInvoke, --readJavascript, --readXquery, --readJavascriptFile, or --readXqueryFile."), + stderr.contains("Must specify one of --read-invoke, --read-javascript, --read-xquery, --read-javascript-file, or --read-xquery-file."), "Unexpected stderr: " + stderr ); } @@ -65,12 +65,12 @@ void missingReadParam() { void missingWriteParam() { String stderr = runAndReturnStderr(() -> run( "reprocess", - "--connectionString", makeConnectionString(), - "--readJavascript", "fn.currentDate()" + "--connection-string", makeConnectionString(), + "--read-javascript", "fn.currentDate()" )); assertTrue( - stderr.contains("Must specify one of --writeInvoke, --writeJavascript, --writeXquery, --writeJavascriptFile, or --writeXqueryFile."), + stderr.contains("Must specify one of --write-invoke, --write-javascript, --write-xquery, --write-javascript-file, or --write-xquery-file."), "Unexpected stderr: " + stderr ); } @@ -79,16 +79,16 @@ void missingWriteParam() { void moreThanOnePartitionParam() { String stderr = runAndReturnStderr(() -> run( "reprocess", - "--connectionString", makeConnectionString(), - "--readJavascript", "doesn't matter", - "--writeJavascript", "doesn't matter", - 
"--readPartitionsJavascript", "doesn't matter", - "--readPartitionsJavascriptFile", "doesn't matter" + "--connection-string", makeConnectionString(), + "--read-javascript", "doesn't matter", + "--write-javascript", "doesn't matter", + "--read-partitions-javascript", "doesn't matter", + "--read-partitions-javascript-file", "doesn't matter" )); assertTrue( - stderr.contains("Can only specify one of --readPartitionsInvoke, --readPartitionsJavascript, " + - "--readPartitionsXquery, --readPartitionsJavascriptFile, or --readPartitionsXqueryFile."), + stderr.contains("Can only specify one of --read-partitions-invoke, --read-partitions-javascript, " + + "--read-partitions-xquery, --read-partitions-javascript-file, or --read-partitions-xquery-file."), "Unexpected stderr: " + stderr ); } diff --git a/new-tool-cli/src/test/java/com/marklogic/newtool/junit5/TwoWaySslConfigurer.java b/new-tool-cli/src/test/java/com/marklogic/newtool/junit5/TwoWaySslConfigurer.java index 40bb934c..fe6ff267 100644 --- a/new-tool-cli/src/test/java/com/marklogic/newtool/junit5/TwoWaySslConfigurer.java +++ b/new-tool-cli/src/test/java/com/marklogic/newtool/junit5/TwoWaySslConfigurer.java @@ -270,7 +270,7 @@ private File createKeystoreFile(Path tempDir) throws Exception { /** * Retrieves the server certificate associated with the certificate template for this test and stores it in the - * key store so that the key store can also act as a trust store. + * keystore so that the keystore can also act as a truststore. * * @param tempDir * @throws Exception