Add forbidden api checker #418

Merged
merged 10 commits into from
Jun 22, 2024
7 changes: 7 additions & 0 deletions build.gradle.kts
@@ -5,6 +5,7 @@ plugins {
id("jacoco-report-aggregation")
id("org.sonarqube")
id("com.github.ben-manes.versions") version "0.51.0"
id("de.thetaphi.forbiddenapis")
}

description = "pg-index-health build"
@@ -68,3 +69,9 @@ tasks.named<DependencyUpdatesTask>("dependencyUpdates").configure {
isNonStable(candidate.version)
}
}

forbiddenApis {
bundledSignatures = setOf("jdk-unsafe", "jdk-deprecated", "jdk-internal", "jdk-non-portable", "jdk-system-out", "jdk-reflection")
signaturesFiles = files("${rootDir}/config/forbidden-apis/forbidden-apis.txt")
ignoreFailures = false
}
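
Side note (not part of this PR): the jdk-system-out entry in the bundled signatures above bans direct console output in checked sources. A minimal, hypothetical sketch of the shape the checker pushes code toward, assuming the project logs through SLF4J (the errorprone-slf4j dependency below suggests it does):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class ConsoleOutputExample {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConsoleOutputExample.class);

    void report(final String indexName) {
        // System.out.println("duplicated index: " + indexName); // would be flagged by jdk-system-out
        LOGGER.warn("duplicated index: {}", indexName); // routed through the logging facade instead
    }
}
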
1 change: 1 addition & 0 deletions buildSrc/build.gradle.kts
@@ -12,4 +12,5 @@ dependencies {
implementation("net.ltgt.gradle:gradle-errorprone-plugin:3.1.0")
implementation("info.solidsoft.gradle.pitest:gradle-pitest-plugin:1.15.0")
implementation("org.gradle:test-retry-gradle-plugin:1.5.9")
implementation("de.thetaphi:forbiddenapis:3.7")
}
15 changes: 15 additions & 0 deletions buildSrc/src/main/kotlin/pg-index-health.forbidden-apis.gradle.kts
@@ -0,0 +1,15 @@
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis

plugins {
id("de.thetaphi.forbiddenapis")
}

forbiddenApis {
bundledSignatures = setOf("jdk-unsafe", "jdk-deprecated", "jdk-internal", "jdk-non-portable", "jdk-system-out", "jdk-reflection")
signaturesFiles = files("${rootDir}/config/forbidden-apis/forbidden-apis.txt")
ignoreFailures = false
}

tasks.named("check") {
dependsOn("forbiddenApis")
}
Original file line number Diff line number Diff line change
@@ -14,10 +14,11 @@ plugins {
id("java-test-fixtures")
id("net.ltgt.errorprone")
id("org.gradle.test-retry")
id("de.thetaphi.forbiddenapis")
}

dependencies {
checkstyle("com.thomasjensen.checkstyle.addons:checkstyle-addons:7.0.1")
implementation("de.thetaphi:forbiddenapis:3.7")

errorprone("com.google.errorprone:error_prone_core:2.27.1")
errorprone("jp.skypencil.errorprone.slf4j:errorprone-slf4j:0.1.24")
@@ -36,7 +37,7 @@ tasks.withType<JavaCompile>().configureEach {

tasks {
test {
dependsOn(checkstyleMain, checkstyleTest, pmdMain, pmdTest, spotbugsMain, spotbugsTest)
dependsOn(checkstyleMain, checkstyleTest, pmdMain, pmdTest, spotbugsMain, spotbugsTest, forbiddenApisMain, forbiddenApisTest)
}

withType<Test>().configureEach {
@@ -95,3 +96,9 @@ spotbugs {
reportLevel.set(Confidence.LOW)
excludeFilter.set(file("../config/spotbugs/exclude.xml"))
}

forbiddenApis {
bundledSignatures = setOf("jdk-unsafe", "jdk-deprecated", "jdk-internal", "jdk-non-portable", "jdk-system-out", "jdk-reflection")
signaturesFiles = files("${rootDir}/config/forbidden-apis/forbidden-apis.txt")
ignoreFailures = false
}
5 changes: 0 additions & 5 deletions config/checkstyle/checkstyle.xml
@@ -302,10 +302,5 @@
<module name="CommentsIndentation">
<property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>
</module>
<module name="IllegalMethodCall">
<property name="illegalMethodNames"
value="emptyList,emptyMap,emptySet,emptyNavigableMap,emptyNavigableSet,emptySortedMap,emptySortedSet,singleton,singletonList,singletonMap,asList,unmodifiableList,unmodifiableSet"/>
<property name="excludedQualifiers" value=""/>
</module>
</module>
</module>
14 changes: 14 additions & 0 deletions config/forbidden-apis/forbidden-apis.txt
@@ -0,0 +1,14 @@
@defaultMessage Avoid using these methods, consider alternative approaches
java.util.Collections#emptyList()
java.util.Collections#emptyMap()
java.util.Collections#emptySet()
java.util.Collections#emptyNavigableMap()
java.util.Collections#emptyNavigableSet()
java.util.Collections#emptySortedMap()
java.util.Collections#emptySortedSet()
java.util.Collections#singleton(java.lang.Object)
java.util.Collections#singletonList(java.lang.Object)
java.util.Collections#singletonMap(java.lang.Object,java.lang.Object)
java.util.Arrays#asList(java.lang.Object[])
java.util.Collections#unmodifiableList(java.util.List)
java.util.Collections#unmodifiableSet(java.util.Set)
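
To make the effect of this signatures file concrete, here is a minimal sketch (not from this PR; class and method names are hypothetical) of a call site it would reject, next to the Java 9+ immutable factories that can replace it:

import java.util.Arrays;
import java.util.List;

final class ForbiddenCallsExample {

    static List<String> flagged() {
        // Matches java.util.Arrays#asList(java.lang.Object[]) above, so the build fails
        return Arrays.asList("a", "b");
    }

    static List<String> allowed() {
        // Immutable factory, not on the forbidden list
        return List.of("a", "b");
    }
}
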
Original file line number Diff line number Diff line change
@@ -24,25 +24,25 @@
*/
public enum Diagnostic {

BLOATED_INDEXES(ExecutionTopology.ON_PRIMARY, "bloated_indexes.sql", QueryExecutors::executeQueryWithBloatThreshold),
BLOATED_TABLES(ExecutionTopology.ON_PRIMARY, "bloated_tables.sql", QueryExecutors::executeQueryWithBloatThreshold),
DUPLICATED_INDEXES(ExecutionTopology.ON_PRIMARY, "duplicated_indexes.sql", QueryExecutors::executeQueryWithSchema),
FOREIGN_KEYS_WITHOUT_INDEX(ExecutionTopology.ON_PRIMARY, "foreign_keys_without_index.sql", QueryExecutors::executeQueryWithSchema),
INDEXES_WITH_NULL_VALUES(ExecutionTopology.ON_PRIMARY, "indexes_with_null_values.sql", QueryExecutors::executeQueryWithSchema),
INTERSECTED_INDEXES(ExecutionTopology.ON_PRIMARY, "intersected_indexes.sql", QueryExecutors::executeQueryWithSchema),
INVALID_INDEXES(ExecutionTopology.ON_PRIMARY, "invalid_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITH_MISSING_INDEXES(ExecutionTopology.ACROSS_CLUSTER, "tables_with_missing_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITHOUT_PRIMARY_KEY(ExecutionTopology.ON_PRIMARY, "tables_without_primary_key.sql", QueryExecutors::executeQueryWithSchema),
UNUSED_INDEXES(ExecutionTopology.ACROSS_CLUSTER, "unused_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "tables_without_description.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "columns_without_description.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITH_JSON_TYPE(ExecutionTopology.ON_PRIMARY, "columns_with_json_type.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITH_SERIAL_TYPES(ExecutionTopology.ON_PRIMARY, "non_primary_key_columns_with_serial_types.sql", QueryExecutors::executeQueryWithSchema),
FUNCTIONS_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "functions_without_description.sql", QueryExecutors::executeQueryWithSchema),
INDEXES_WITH_BOOLEAN(ExecutionTopology.ON_PRIMARY, "indexes_with_boolean.sql", QueryExecutors::executeQueryWithSchema),
NOT_VALID_CONSTRAINTS(ExecutionTopology.ON_PRIMARY, "check_not_valid_constraints.sql", QueryExecutors::executeQueryWithSchema),
BTREE_INDEXES_ON_ARRAY_COLUMNS(ExecutionTopology.ON_PRIMARY, "btree_indexes_on_array_columns.sql", QueryExecutors::executeQueryWithSchema),
SEQUENCE_OVERFLOW(ExecutionTopology.ON_PRIMARY, "sequence_overflow.sql", QueryExecutors::executeQueryWithRemainingPercentageThreshold);
BLOATED_INDEXES(ExecutionTopology.ON_PRIMARY, "sql/bloated_indexes.sql", QueryExecutors::executeQueryWithBloatThreshold),
BLOATED_TABLES(ExecutionTopology.ON_PRIMARY, "sql/bloated_tables.sql", QueryExecutors::executeQueryWithBloatThreshold),
DUPLICATED_INDEXES(ExecutionTopology.ON_PRIMARY, "sql/duplicated_indexes.sql", QueryExecutors::executeQueryWithSchema),
FOREIGN_KEYS_WITHOUT_INDEX(ExecutionTopology.ON_PRIMARY, "sql/foreign_keys_without_index.sql", QueryExecutors::executeQueryWithSchema),
INDEXES_WITH_NULL_VALUES(ExecutionTopology.ON_PRIMARY, "sql/indexes_with_null_values.sql", QueryExecutors::executeQueryWithSchema),
INTERSECTED_INDEXES(ExecutionTopology.ON_PRIMARY, "sql/intersected_indexes.sql", QueryExecutors::executeQueryWithSchema),
INVALID_INDEXES(ExecutionTopology.ON_PRIMARY, "sql/invalid_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITH_MISSING_INDEXES(ExecutionTopology.ACROSS_CLUSTER, "sql/tables_with_missing_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITHOUT_PRIMARY_KEY(ExecutionTopology.ON_PRIMARY, "sql/tables_without_primary_key.sql", QueryExecutors::executeQueryWithSchema),
UNUSED_INDEXES(ExecutionTopology.ACROSS_CLUSTER, "sql/unused_indexes.sql", QueryExecutors::executeQueryWithSchema),
TABLES_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "sql/tables_without_description.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "sql/columns_without_description.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITH_JSON_TYPE(ExecutionTopology.ON_PRIMARY, "sql/columns_with_json_type.sql", QueryExecutors::executeQueryWithSchema),
COLUMNS_WITH_SERIAL_TYPES(ExecutionTopology.ON_PRIMARY, "sql/non_primary_key_columns_with_serial_types.sql", QueryExecutors::executeQueryWithSchema),
FUNCTIONS_WITHOUT_DESCRIPTION(ExecutionTopology.ON_PRIMARY, "sql/functions_without_description.sql", QueryExecutors::executeQueryWithSchema),
INDEXES_WITH_BOOLEAN(ExecutionTopology.ON_PRIMARY, "sql/indexes_with_boolean.sql", QueryExecutors::executeQueryWithSchema),
NOT_VALID_CONSTRAINTS(ExecutionTopology.ON_PRIMARY, "sql/check_not_valid_constraints.sql", QueryExecutors::executeQueryWithSchema),
BTREE_INDEXES_ON_ARRAY_COLUMNS(ExecutionTopology.ON_PRIMARY, "sql/btree_indexes_on_array_columns.sql", QueryExecutors::executeQueryWithSchema),
SEQUENCE_OVERFLOW(ExecutionTopology.ON_PRIMARY, "sql/sequence_overflow.sql", QueryExecutors::executeQueryWithRemainingPercentageThreshold);

private final ExecutionTopology executionTopology;
private final String sqlQueryFileName;
Original file line number Diff line number Diff line change
@@ -21,6 +21,7 @@

import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -80,7 +81,7 @@ public PgParam getParamCurrentValue(@Nonnull final ParamNameAware paramName) {

@Nonnull
private PgParam getCurrentValue(@Nonnull final String paramName) {
final String sqlQuery = String.format("show %s;", paramName);
final String sqlQuery = String.format(Locale.ROOT, "show %s;", paramName);
final List<PgParam> params = QueryExecutors.executeQuery(pgConnection, sqlQuery, rs -> {
final String currentValue = rs.getString(paramName);
return PgParamImpl.of(paramName, currentValue);
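
The Locale.ROOT additions in this and the following files target the locale-sensitive String.format overload, which the jdk-unsafe bundled signatures appear to forbid because its output depends on the JVM's default locale. A small self-contained illustration (not from this PR):

import java.util.Locale;

final class LocaleFormatExample {

    static String[] formatPi() {
        return new String[] {
            String.format("%.2f", Math.PI),             // default locale: "3,14" under Locale.GERMANY
            String.format(Locale.ROOT, "%.2f", Math.PI) // locale-independent: always "3.14"
        };
    }
}
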
Original file line number Diff line number Diff line change
@@ -21,6 +21,8 @@
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import java.util.Locale;

import static io.github.mfvanek.pg.support.AbstractCheckOnHostAssert.assertThat;

class ColumnsWithSerialTypesCheckOnHostTest extends DatabaseAwareTestBase {
@@ -44,9 +46,9 @@ void onDatabaseWithThem(final String schemaName) {
.hasSize(2)
.containsExactly(
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_account_id"), String.format("%s.bad_accounts_real_account_id_seq", schemaName)),
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_account_id"), String.format(Locale.ROOT, "%s.bad_accounts_real_account_id_seq", schemaName)),
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_client_id"), String.format("%s.bad_accounts_real_client_id_seq", schemaName))
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_client_id"), String.format(Locale.ROOT, "%s.bad_accounts_real_client_id_seq", schemaName))
));
}

@@ -59,7 +61,7 @@ void shouldIgnoreDroppedColumns(final String schemaName) {
.hasSize(1)
.containsExactly(
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_client_id"), String.format("%s.bad_accounts_real_client_id_seq", schemaName))
Column.ofNotNull(ctx.enrichWithSchema("bad_accounts"), "real_client_id"), String.format(Locale.ROOT, "%s.bad_accounts_real_client_id_seq", schemaName))
));
}

@@ -81,7 +83,7 @@ void shouldDetectSerialColumnsWithUniqueConstraints(final String schemaName) {
.hasSize(1)
.containsExactly(
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("one_more_table"), "id"), String.format("%s.one_more_table_id_seq", schemaName))
Column.ofNotNull(ctx.enrichWithSchema("one_more_table"), "id"), String.format(Locale.ROOT, "%s.one_more_table_id_seq", schemaName))
));
}

@@ -94,11 +96,11 @@ void shouldDetectPrimaryKeysThatAreForeignKeysAsWell(final String schemaName) {
.hasSize(3)
.containsExactly(
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("one_more_table"), "id"), String.format("%s.one_more_table_id_seq", schemaName)),
Column.ofNotNull(ctx.enrichWithSchema("one_more_table"), "id"), String.format(Locale.ROOT, "%s.one_more_table_id_seq", schemaName)),
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("test_table"), "id"), String.format("%s.test_table_id_seq", schemaName)),
Column.ofNotNull(ctx.enrichWithSchema("test_table"), "id"), String.format(Locale.ROOT, "%s.test_table_id_seq", schemaName)),
ColumnWithSerialType.ofBigSerial(
Column.ofNotNull(ctx.enrichWithSchema("test_table"), "num"), String.format("%s.test_table_num_seq", schemaName))
Column.ofNotNull(ctx.enrichWithSchema("test_table"), "num"), String.format(Locale.ROOT, "%s.test_table_num_seq", schemaName))
));
}
}
Original file line number Diff line number Diff line change
@@ -23,6 +23,7 @@
import org.junit.jupiter.params.provider.ValueSource;

import java.util.List;
import java.util.Locale;

import static io.github.mfvanek.pg.support.AbstractCheckOnHostAssert.assertThat;

@@ -51,7 +52,7 @@ void onDatabaseWithThem(final String schemaName) {

ExecuteUtils.executeOnDatabase(getDataSource(), statement -> {
for (final Constraint constraint : notValidConstraints) {
statement.execute(String.format("alter table %s validate constraint %s;",
statement.execute(String.format(Locale.ROOT, "alter table %s validate constraint %s;",
constraint.getTableName(), constraint.getConstraintName()));
}
});
Original file line number Diff line number Diff line change
@@ -15,6 +15,7 @@
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

import java.util.Locale;
import javax.annotation.Nonnull;
import javax.sql.DataSource;

@@ -51,6 +52,6 @@ void createDataSource() {

@Nonnull
private String getWriteUrl() {
return String.format("jdbc:postgresql://localhost:%d/postgres?prepareThreshold=0&preparedStatementCacheQueries=0", 6432);
return String.format(Locale.ROOT, "jdbc:postgresql://localhost:%d/postgres?prepareThreshold=0&preparedStatementCacheQueries=0", 6432);
}
}
Original file line number Diff line number Diff line change
@@ -15,6 +15,7 @@
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

import java.util.Locale;
import javax.sql.DataSource;

import static org.assertj.core.api.Assertions.assertThat;
@@ -34,7 +35,7 @@ void getPrimaryDataSource() {
@Test
void isPrimaryForAnyHost() {
final int port = getPort();
final String readUrl = String.format("jdbc:postgresql://localhost:%d/postgres?" +
final String readUrl = String.format(Locale.ROOT, "jdbc:postgresql://localhost:%d/postgres?" +
"prepareThreshold=0&preparedStatementCacheQueries=0&targetServerType=preferSecondary", port);
final PgConnection any = PgConnectionImpl.of(getDataSource(), PgHostImpl.ofUrl(readUrl));
assertThat(any).isNotNull();
Original file line number Diff line number Diff line change
@@ -17,6 +17,7 @@
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Locale;
import javax.sql.DataSource;

import static org.assertj.core.api.Assertions.assertThat;
@@ -61,7 +62,7 @@ void withInvalidArgument() {

@Test
void isPrimaryForSecondaryHost() {
final String readUrl = String.format("jdbc:postgresql://localhost:%d/postgres?" +
final String readUrl = String.format(Locale.ROOT, "jdbc:postgresql://localhost:%d/postgres?" +
"prepareThreshold=0&preparedStatementCacheQueries=0&targetServerType=secondary", getPort());
final PgConnection secondary = PgConnectionImpl.of(getDataSource(), PgHostImpl.ofUrl(readUrl));
assertThat(secondary).isNotNull();
Original file line number Diff line number Diff line change
@@ -32,6 +32,7 @@
import io.github.mfvanek.pg.connection.HighAvailabilityPgConnection;
import io.github.mfvanek.pg.model.DbObject;

import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
@@ -74,11 +75,15 @@ public DatabaseChecks(@Nonnull final HighAvailabilityPgConnection haPgConnection
public <T extends DbObject> DatabaseCheckOnCluster<T> getCheck(@Nonnull final Diagnostic diagnostic, @Nonnull final Class<T> type) {
final DatabaseCheckOnCluster<?> check = checks.get(diagnostic);
if (check == null) {
throw new IllegalStateException(String.format("Check for diagnostic %s not found", diagnostic));
throw new IllegalStateException(String.format(Locale.ROOT, "Check for diagnostic %s not found", diagnostic));
}
if (!type.isAssignableFrom(check.getType())) {
throw new IllegalArgumentException("Illegal type: " + type);
}
return (DatabaseCheckOnCluster<T>) check;
}

public void clearChecks() {
checks.clear();
}
}
Original file line number Diff line number Diff line change
@@ -20,9 +20,6 @@
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.annotation.Nonnull;

import static org.assertj.core.api.Assertions.assertThat;
@@ -52,13 +49,9 @@ void completenessTest(@Nonnull final Diagnostic diagnostic) {
}

@Test
void shouldThrowExceptionIfCheckNotFound() throws NoSuchFieldException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
void shouldThrowExceptionIfCheckNotFound() {
final DatabaseChecks databaseChecks = new DatabaseChecks(getHaPgConnection());
final Field field = databaseChecks.getClass().getDeclaredField("checks");
field.setAccessible(true);
final Object fieldValue = field.get(databaseChecks);
final Method clearMethod = fieldValue.getClass().getDeclaredMethod("clear");
clearMethod.invoke(fieldValue);
databaseChecks.clearChecks();

assertThatThrownBy(() -> databaseChecks.getCheck(Diagnostic.INVALID_INDEXES, Index.class))
.isInstanceOf(IllegalStateException.class)
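
For readers unfamiliar with the bundled signatures: the test rework above (calling the new clearChecks() instead of reflective field access) lines up with jdk-reflection, which, to the best of my knowledge, flags the setAccessible(true) idiom for prying open private state. A hypothetical helper in that now-forbidden style would look like this:

import java.lang.reflect.Field;

final class ReflectionExample {

    static Object readPrivateField(final Object target, final String name) throws ReflectiveOperationException {
        final Field field = target.getClass().getDeclaredField(name);
        field.setAccessible(true); // AccessibleObject#setAccessible is what jdk-reflection bans
        return field.get(target);
    }
}
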
Original file line number Diff line number Diff line change
@@ -11,6 +11,7 @@
package io.github.mfvanek.pg.model.column;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nonnull;
@@ -78,7 +79,7 @@ public static SerialType valueFrom(@Nonnull final String pgColumnType) {
Objects.requireNonNull(pgColumnType, "pgColumnType cannot be null");
final SerialType serialType = VALUES.get(pgColumnType);
if (serialType == null) {
throw new IllegalArgumentException(String.format("pgColumnType = '%s'", pgColumnType));
throw new IllegalArgumentException(String.format(Locale.ROOT, "pgColumnType = '%s'", pgColumnType));
}
return serialType;
}