Skip to content

Commit

Permalink
- fix failed tests
Browse files Browse the repository at this point in the history
  • Loading branch information
psmagin committed Feb 14, 2024
1 parent 8fe29e2 commit 124ea3f
Show file tree
Hide file tree
Showing 44 changed files with 116 additions and 151 deletions.
11 changes: 9 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,13 @@

<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-bom</artifactId>
<version>2.22.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-stack-depchain</artifactId>
Expand Down Expand Up @@ -228,8 +235,7 @@
</dependency>
<dependency> <!-- for testcontainers -->
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>2.22.1</version>
<artifactId>log4j-slf4j2-impl</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
Expand All @@ -242,6 +248,7 @@

<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
Expand Down
22 changes: 10 additions & 12 deletions src/main/java/org/folio/rest/impl/InstanceStorageBatchApi.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,13 @@

import com.google.common.collect.Lists;
import io.vertx.core.AsyncResult;
import io.vertx.core.CompositeFuture;
import io.vertx.core.Context;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.ws.rs.core.Response;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
Expand Down Expand Up @@ -102,12 +100,12 @@ public void postInstanceStorageBatchInstances(Instances entity,
* @return succeeded future containing the list of completed (failed and succeeded)
* individual result futures, one per instance
*/
private Future<List<Future>> executeInBatch(List<Instance> instances,
Function<List<Instance>, Future<List<Future>>> action) {
List<Future> totalFutures = new ArrayList<>();
private Future<List<Future<Instance>>> executeInBatch(List<Instance> instances,
Function<List<Instance>, Future<List<Future<Instance>>>> action) {
List<Future<Instance>> totalFutures = new ArrayList<>();

List<List<Instance>> batches = Lists.partition(instances, PARALLEL_DB_CONNECTIONS_LIMIT);
Future<List<Future>> future = succeededFuture();
Future<List<Future<Instance>>> future = succeededFuture();
for (List<Instance> batch : batches) {
future = future.compose(x -> action.apply(batch))
.onSuccess(totalFutures::addAll);
Expand All @@ -122,12 +120,12 @@ private Future<List<Future>> executeInBatch(List<Instance> instances,
* @param postgresClient Postgres Client
* @return succeeded future containing the list of completed (succeeded and failed) individual result futures
*/
private Future<List<Future>> saveInstances(List<Instance> instances, PostgresClient postgresClient) {
List<Future> futures = instances.stream()
private Future<List<Future<Instance>>> saveInstances(List<Instance> instances, PostgresClient postgresClient) {
List<Future<Instance>> futures = instances.stream()
.map(instance -> saveInstance(instance, postgresClient))
.collect(Collectors.toList());
.toList();

return CompositeFuture.join(futures)
return Future.join(futures)
// on success and on failure return succeeding future with list of all (succeeded and failed) futures
.map(futures)
.otherwise(futures);
Expand Down Expand Up @@ -165,14 +163,14 @@ private Future<Instance> saveInstance(Instance instance, PostgresClient postgres
* @param saveFutures list of completed individual result futures
* @return InstancesBatchResponse
*/
private InstancesBatchResponse constructResponse(List<Future> saveFutures) {
private InstancesBatchResponse constructResponse(List<Future<Instance>> saveFutures) {
InstancesBatchResponse response = new InstancesBatchResponse();

saveFutures.forEach(save -> {
if (save.failed()) {
response.getErrorMessages().add(save.cause().getMessage());
} else {
response.getInstances().add((Instance) save.result());
response.getInstances().add(save.result());
}
});

Expand Down
5 changes: 2 additions & 3 deletions src/main/java/org/folio/rest/impl/LocationApi.java
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,8 @@ public void deleteLocationsById(

/**
 * Combines the given location-check futures into a single composite result.
 *
 * <p>Uses {@link io.vertx.core.Future#all(java.util.List)} (the replacement for the
 * deprecated {@code CompositeFuture.all(List<Future>)} raw-typed overload), so the
 * composite fails as soon as any individual check future fails.
 *
 * <p>NOTE(review): this span was diff residue — both the old raw-typed
 * {@code CompositeFuture.all} lines and the new typed {@code Future.all} lines were
 * present, producing a duplicate declaration of {@code allFutures} and a duplicate
 * return. Resolved to the post-change version.
 *
 * @param futures individual location-check futures, each yielding a
 *                {@code LocationCheckError} (or failing)
 * @return a {@code CompositeFuture} that completes when all checks complete
 */
@SafeVarargs
private CompositeFuture runLocationChecks(Future<LocationCheckError>... futures) {
  // Typed list: Future.all accepts List<Future<T>>, so the old
  // @SuppressWarnings("rawtypes") workaround is no longer needed.
  List<Future<LocationCheckError>> allFutures = new ArrayList<>(Arrays.asList(futures));
  return Future.all(allFutures);
}

private Future<LocationCheckError> checkIdProvided(Location entity) {
Expand Down
3 changes: 1 addition & 2 deletions src/main/java/org/folio/rest/support/CollectionUtil.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collection;
import java.util.Collections;
import java.util.stream.Collectors;
import org.folio.dbschema.ObjectMapperTool;

public final class CollectionUtil {
Expand All @@ -22,7 +21,7 @@ public static <T> Collection<T> deepCopy(Collection<T> collection, Class<T> type

return collection.stream()
.map(r -> clone(r, type))
.collect(Collectors.toList());
.toList();
}

public static <T> T getFirst(Collection<T> collection) {
Expand Down
3 changes: 1 addition & 2 deletions src/main/java/org/folio/rest/support/CqlQuery.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,9 @@ public boolean isMatchingAll() {
} catch (Exception e) {
return false;
}
if (!(cqlNode instanceof CQLTermNode)) {
if (!(cqlNode instanceof CQLTermNode node)) {
return false;
}
var node = (CQLTermNode) cqlNode;
// cql.allRecords: A special index which matches every record available. Every record is matched no matter what
// values are provided for the relation and term, but the recommended syntax is: cql.allRecords = 1
// http://docs.oasis-open.org/search-ws/searchRetrieve/v1.0/os/part5-cql/searchRetrieve-v1.0-os-part5-cql.html#_Toc324166821
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ public Future<List<Item>> populateEffectiveValues(List<Item> items) {
return getHoldingsRecordsForItems(items)
.map(holdingsRecordMap -> items.stream()
.map(item -> populateEffectiveValues(item, holdingsRecordMap.get(item.getHoldingsRecordId())))
.collect(Collectors.toList()));
.toList());
}

public Future<Item> populateEffectiveValues(Item item) {
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/org/folio/services/SuDocCallNumber.java
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ private void appendWithSymbolIfNeeded(StringBuilder key, String cnPart) {
}
var parts = cnPart.split("[./ -]");
for (String part : parts) {
if (key.length() > 0) {
if (!key.isEmpty()) {
key.append(' ');
}
part = part.trim();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package org.folio.services.domainevent;

import static io.vertx.core.CompositeFuture.all;
import static io.vertx.core.Future.all;
import static io.vertx.core.Future.succeededFuture;
import static org.apache.logging.log4j.LogManager.getLogger;
import static org.folio.rest.tools.utils.TenantTool.tenantId;
Expand All @@ -20,7 +20,6 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.LongFunction;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.logging.log4j.Logger;
Expand Down Expand Up @@ -124,7 +123,7 @@ Future<Void> publishRecordsUpdated(Collection<Triple<String, T, T>> updatedRecor

return all(updatedRecords.stream()
.map(triple -> publishRecordUpdated(triple.getLeft(), triple.getMiddle(), triple.getRight()))
.collect(Collectors.toList()))
.toList())
.map(notUsed -> null);
}

Expand All @@ -141,7 +140,7 @@ Future<Void> publishRecordsCreated(List<Pair<String, T>> records) {

return all(records.stream()
.map(pair -> publishRecordCreated(pair.getKey(), pair.getValue()))
.collect(Collectors.toList()))
.toList())
.map(notUsed -> null);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.Logger;
import org.folio.persist.InstanceRepository;
Expand All @@ -35,14 +34,14 @@ public Future<Void> publishInstancesCreated(List<Instance> instances) {

return domainEventService.publishRecordsCreated(instances.stream()
.map(instance -> pair(instance.getId(), instance))
.collect(Collectors.toList()));
.toList());
}

@Override
protected Future<List<Pair<String, Instance>>> getInstanceIds(Collection<Instance> instances) {
return succeededFuture(instances.stream()
.map(instance -> pair(instance.getId(), instance))
.collect(Collectors.toList()));
.toList());
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ private CompositeFuture createShadowInstancesIfNeeded(List<HoldingsRecord> holdi
String instanceId = holdingRecord.getInstanceId();
instanceFuturesMap.computeIfAbsent(instanceId, v -> createShadowInstanceIfNeeded(instanceId, consortiumData));
}
return CompositeFuture.all(new ArrayList<>(instanceFuturesMap.values()));
return Future.all(new ArrayList<>(instanceFuturesMap.values()));
}

private Future<SharingInstance> createShadowInstanceIfNeeded(String instanceId, ConsortiumData consortiumData) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.folio.persist.AsyncMigrationJobRepository;
import org.folio.rest.jaxrs.model.AsyncMigration;
Expand Down Expand Up @@ -172,13 +171,13 @@ public Future<AsyncMigrationJob> logJobProcessed(String migrationName, String jo
private List<AsyncMigrationJobRunner> getMigrationJobRunnersByName(List<String> migrationNames) {
return MIGRATION_JOB_RUNNERS.stream()
.filter(runners -> migrationNames.contains(runners.getMigrationName()))
.collect(Collectors.toList());
.toList();
}

private boolean isJobAvailable(AsyncMigrationJobRequest jobRequest) {
var availableMigrations = getAvailableMigrations().getAsyncMigrations()
.stream().flatMap(v -> Stream.of(v.getMigrations())).collect(Collectors.toList())
.stream().flatMap(List::stream).collect(Collectors.toList());
.stream().flatMap(v -> Stream.of(v.getMigrations())).toList()
.stream().flatMap(List::stream).toList();
return availableMigrations.containsAll(jobRequest.getMigrations());
}

Expand All @@ -188,7 +187,7 @@ private AsyncMigrationJob buildInitialJob(AsyncMigrationJobRequest request) {
jobRunners.forEach(asyncMigrationJobRunner -> affectedEntities.addAll(asyncMigrationJobRunner
.getAffectedEntities()
.stream().map(Enum::name)
.collect(Collectors.toList())));
.toList()));
return new AsyncMigrationJob()
.withJobStatus(AsyncMigrationJob.JobStatus.IN_PROGRESS)
.withMigrations(request.getMigrations())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections4.map.CaseInsensitiveMap;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
Expand Down Expand Up @@ -59,9 +58,9 @@ public static Handler<KafkaConsumerRecords<String, JsonObject>> pollAsyncMigrati
.map(javaMigration -> javaMigration.runMigrationForIds(ids)
.onSuccess(notUsed -> jobService.logJobProcessed(migrationName, migrationJob.getId(), ids.size()))
.onFailure(notUsed -> jobService.logJobFail(migrationJob.getId())))
.collect(Collectors.toList());
.toList();
return CompositeFuture.all(new ArrayList<>(startedMigrations));
}).collect(Collectors.toList());
}).toList();

CompositeFuture.all(new ArrayList<>(migrations))
.onSuccess(composite -> consumer.commit())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ protected Future<Integer> updateBatch(List<Row> batch, SQLConnection connection)
var instances = batch.stream()
.map(row -> rowToClass(row, Instance.class))
.peek(valuesService::populatePublicationPeriod)
.collect(Collectors.toList());
.toList();
return instanceRepository.updateBatch(instances, connection)
.map(notUsed -> instances.size());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ protected Future<Integer> updateBatch(List<Row> batch, SQLConnection connection)
var instances = batch.stream()
.map(row -> row.getJsonObject("jsonb"))
.map(json -> json.mapTo(Instance.class))
.collect(Collectors.toList());
.toList();
return instanceRepository.updateBatch(instances, connection)
.map(notUsed -> instances.size());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import io.vertx.sqlclient.RowStream;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.folio.persist.ItemRepository;
import org.folio.rest.jaxrs.model.Item;
import org.folio.rest.persist.PgUtil;
Expand Down Expand Up @@ -45,7 +44,7 @@ protected Future<Integer> updateBatch(List<Row> batch, SQLConnection connection)
var items = batch.stream()
.map(row -> rowToClass(row, Item.class))
.map(EffectiveCallNumberComponentsUtil::calculateAndSetEffectiveShelvingOrder)
.collect(Collectors.toList());
.toList();

return itemRepository.updateBatch(items, connection).map(notUsed -> items.size());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,8 @@ public void shouldRespondWith500StatusWhenErrorsOccursWhilstFetchingRecords(Test
RoutingContext routingContext = mock(RoutingContext.class);
when(routingContext.response()).thenReturn(mock(HttpServerResponse.class));
new MyAbstractInstanceRecordsApi().fetchRecordsByQuery("SELECT 1",
routingContext, null, testContext.asyncAssertSuccess(response -> {
assertThat(response.getStatus(), is(500));
}));
routingContext, null,
testContext.asyncAssertSuccess(response -> assertThat(response.getStatus(), is(500))));
}

@Test
Expand Down
4 changes: 2 additions & 2 deletions src/test/java/org/folio/rest/api/AsyncMigrationTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
Expand Down Expand Up @@ -173,7 +173,7 @@ public void canGetAllAvailableMigrationJobs() {
.withMigrations(List.of("itemShelvingOrderMigration")));
AsyncMigrationJobCollection migrations = asyncMigration.getAllMigrationJobs();
assertNotNull(migrations);
assertTrue(migrations.getJobs().size() > 0);
assertFalse(migrations.getJobs().isEmpty());
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,14 +29,14 @@
public class EffectiveLocationMigrationTest extends TestBaseWithInventoryUtil {
private static final String SET_EFFECTIVE_LOCATION = ResourceUtil
.asString("templates/db_scripts/setEffectiveHoldingsLocation.sql")
.replace("${myuniversity}_${mymodule}", "test_tenant_mod_inventory_storage");
.replace("${myuniversity}_${mymodule}", "test_mod_inventory_storage");
private static final Vertx VERTX = Vertx.vertx();
private static final UUID INSTANCE_ID = UUID.randomUUID();
private static final UUID HOLDINGS_ID = UUID.randomUUID();
private static final String REMOVE_EXISTING_FIELD =
"UPDATE test_tenant_mod_inventory_storage.holdings_record SET jsonb = jsonb - 'effectiveLocationId';";
"UPDATE test_mod_inventory_storage.holdings_record SET jsonb = jsonb - 'effectiveLocationId';";
private static final String QUERY =
"SELECT jsonb FROM test_tenant_mod_inventory_storage.holdings_record WHERE id = '" + HOLDINGS_ID + "';";
"SELECT jsonb FROM test_mod_inventory_storage.holdings_record WHERE id = '" + HOLDINGS_ID + "';";

@Before
public void beforeEach() {
Expand Down
Loading

0 comments on commit 124ea3f

Please sign in to comment.