Skip to content

Commit

Permalink
Merge branch 'hotfix-1.31.11'
Browse files Browse the repository at this point in the history
  • Loading branch information
arteymix committed Sep 20, 2024
2 parents ef09084 + a0532cc commit 83f4594
Show file tree
Hide file tree
Showing 244 changed files with 5,361 additions and 9,304 deletions.
32 changes: 31 additions & 1 deletion .jenkins/Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ void setBuildStatus(String context, String message, String state) {
pipeline {
agent any
tools {
maven 'Maven 3.8.1'
maven 'Maven 3.1.1'
}
parameters {
string(name: 'DATA_DIR', defaultValue: '/space/gemmaData', description: 'Directory where production data is stored.')
Expand All @@ -34,6 +34,7 @@ pipeline {
choice(name: 'STAGING_BRANCH', choices: ['hotfix', 'release'], description: 'Branch deployed to the staging server and CLI symlink.')
// whether or not to force integration tests
booleanParam(name: 'FORCE_INTEGRATION_TESTS', defaultValue: false, description: 'Force integration tests to be performed regardless of the branch being built.')
booleanParam(name: 'FORCE_SONARQUBE', defaultValue: false, description: 'Force SonarQube analysis to be performed regardless of the branch being built.')
}
stages {
stage('Checkout scm') {
Expand Down Expand Up @@ -131,6 +132,7 @@ pipeline {
branch 'release-*'
branch 'development'
expression { params.FORCE_INTEGRATION_TESTS }
expression { params.FORCE_SONARQUBE }
}
}
parallel {
Expand Down Expand Up @@ -164,6 +166,34 @@ pipeline {
}
}
}
stage('SonarQube Analysis') {
when {
anyOf {
branch 'master'
branch 'hotfix-*'
branch 'release-*'
branch 'development'
expression { params.FORCE_SONARQUBE }
}
}
steps {
setBuildStatus "SonarQube Analysis", "SonarQube analysis for build #${env.BUILD_NUMBER} has started...", 'PENDING'
withSonarQubeEnv('UBC SonarQube') {
sh "mvn sonar:sonar -Dsonar.projectKey=mslg"
}
}
post {
success {
setBuildStatus "SonarQube analysis", "SonarQube analysis for build #${env.BUILD_NUMBER} have passed.", 'SUCCESS'
}
failure {
setBuildStatus "SonarQube analysis", "SonarQube analysis for build #${env.BUILD_NUMBER} failed.", 'ERROR'
}
unstable {
setBuildStatus "SonarQube analysis", "SonarQube analysis for build #${env.BUILD_NUMBER} failed.", 'FAILURE'
}
}
}
stage('Deploy artifacts') {
when {
anyOf {
Expand Down
2 changes: 1 addition & 1 deletion gemma-cli/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
<parent>
<artifactId>gemma</artifactId>
<groupId>gemma</groupId>
<version>1.31.10</version>
<version>1.31.11</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>gemma-cli</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import ubic.gemma.core.analysis.sequence.ProbeMapperConfig;
import ubic.gemma.core.config.Settings;
import ubic.gemma.core.goldenpath.GoldenPathSequenceAnalysis;
import ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignProbeMapperService;
import ubic.gemma.core.util.AbstractCLI;
Expand All @@ -18,7 +18,6 @@
import ubic.gemma.model.expression.designElement.CompositeSequence;
import ubic.gemma.model.genome.Taxon;
import ubic.gemma.model.genome.sequenceAnalysis.BlatAssociation;
import ubic.gemma.persistence.service.common.auditAndSecurity.AuditTrailService;
import ubic.gemma.persistence.service.common.description.ExternalDatabaseService;
import ubic.gemma.persistence.service.expression.designElement.CompositeSequenceService;
import ubic.gemma.persistence.service.genome.taxon.TaxonService;
Expand Down Expand Up @@ -58,11 +57,19 @@ public class ArrayDesignProbeMapperCli extends ArrayDesignSequenceManipulatingCl
private final static String OPTION_REFSEQ = "r";

@Autowired
private AuditTrailService auditTrailService;
private TaxonService taxonService;
@Autowired
private ArrayDesignProbeMapperService arrayDesignProbeMapperService;
@Autowired
private ExternalDatabaseService eds;
@Autowired
private CompositeSequenceService compositeSequenceService;

@Value("${gemma.goldenpath.db.rat}")
private String goldenPathRatDbName;

private String[] probeNames = null;
private ProbeMapperConfig config;
private ArrayDesignProbeMapperService arrayDesignProbeMapperService;
private String directAnnotationInputFileName = null;
private boolean ncbiIds = false;
private ExternalDatabase sourceDatabase = null;
Expand All @@ -76,12 +83,6 @@ public class ArrayDesignProbeMapperCli extends ArrayDesignSequenceManipulatingCl
private Double identityThreshold = null;
private Double overlapThreshold = null;

@Override
public GemmaCLI.CommandGroup getCommandGroup() {
return GemmaCLI.CommandGroup.PLATFORM;
}

@SuppressWarnings({ "AccessStaticViaInstance", "static-access", "deprecation" })
@Override
protected void buildOptions( Options options ) {
super.buildOptions( options );
Expand Down Expand Up @@ -178,8 +179,6 @@ protected boolean requireLogin() {
return true;
}

private TaxonService taxonService;

/**
* See 'configure' for how the other options are handled. (non-Javadoc)
*
Expand All @@ -188,8 +187,6 @@ protected boolean requireLogin() {
@Override
protected void processOptions( CommandLine commandLine ) throws ParseException {
super.processOptions( commandLine );
arrayDesignProbeMapperService = this.getBean( ArrayDesignProbeMapperService.class );
taxonService = this.getBean( TaxonService.class );

if ( commandLine.hasOption( "import" ) ) {
if ( !commandLine.hasOption( 't' ) ) {
Expand All @@ -201,8 +198,6 @@ protected void processOptions( CommandLine commandLine ) throws ParseException {
}
String sourceDBName = commandLine.getOptionValue( "source" );

ExternalDatabaseService eds = this.getBean( ExternalDatabaseService.class );

this.sourceDatabase = eds.findByName( sourceDBName );

this.directAnnotationInputFileName = commandLine.getOptionValue( "import" );
Expand Down Expand Up @@ -269,7 +264,7 @@ protected void processOptions( CommandLine commandLine ) throws ParseException {
* Override to do additional checks to make sure the array design is in a state of readiness for probe mapping.
*/
@Override
boolean needToRun( Date skipIfLastRunLaterThan, ArrayDesign arrayDesign,
protected boolean needToRun( Date skipIfLastRunLaterThan, ArrayDesign arrayDesign,
Class<? extends ArrayDesignAnalysisEvent> eventClass ) {

if ( this.force ) {
Expand Down Expand Up @@ -537,8 +532,7 @@ private void configure( ArrayDesign arrayDesign ) {
isRat = taxon.getCommonName().equals( "rat" );
}

boolean isMissingTracks = isRat && Settings
.getString( "gemma.goldenpath.db.rat" ).startsWith( "rn" );
boolean isMissingTracks = isRat && goldenPathRatDbName.startsWith( "rn" );

if ( mirnaOnlyModeOption ) {
AbstractCLI.log.info( "Micro RNA only mode" );
Expand Down Expand Up @@ -649,7 +643,6 @@ private void processProbes( ArrayDesign arrayDesign ) {
assert this.probeNames != null && this.probeNames.length > 0;
arrayDesign = getArrayDesignService().thawLite( arrayDesign );
this.configure( arrayDesign );
CompositeSequenceService compositeSequenceService = this.getBean( CompositeSequenceService.class );

for ( String probeName : this.probeNames ) {
CompositeSequence probe = compositeSequenceService.findByName( arrayDesign, probeName );
Expand Down Expand Up @@ -706,7 +699,7 @@ public void run() {
}
}

protected Taxon getTaxonByName( CommandLine commandLine ) {
private Taxon getTaxonByName( CommandLine commandLine ) {
String taxonName = commandLine.getOptionValue( 't' );
ubic.gemma.model.genome.Taxon taxon = taxonService.findByCommonName( taxonName );
if ( taxon == null ) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ Collection<ArrayDesign> getRelatedDesigns( ArrayDesign design ) {
*/
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
// Better semantics
boolean needToRun( Date skipIfLastRunLaterThan, ArrayDesign arrayDesign,
protected boolean needToRun( Date skipIfLastRunLaterThan, ArrayDesign arrayDesign,
Class<? extends ArrayDesignAnalysisEvent> eventClass ) {

if ( isAutoSeek() ) {
Expand All @@ -192,7 +192,7 @@ boolean needToRun( Date skipIfLastRunLaterThan, ArrayDesign arrayDesign,
return true;

List<AuditEvent> events = this.getEvents( arrayDesign, eventClass );
if ( events.size() == 0 ) {
if ( events.isEmpty() ) {
return true; // always do it, it's never been done.
}
// return true if the last time was older than the limit time.
Expand All @@ -215,7 +215,7 @@ private void arraysFromCliList( CommandLine commandLine ) {
}
arrayDesignsToProcess.add( ad );
}
if ( arrayDesignsToProcess.size() == 0 ) {
if ( arrayDesignsToProcess.isEmpty() ) {
throw new RuntimeException( "There were no valid platforms specified" );
}
}
Expand Down Expand Up @@ -258,9 +258,9 @@ private boolean needToAutoRun( ArrayDesign arrayDesign, Class<? extends ArrayDes
}

List<AuditEvent> eventsOfCurrentType = this.getEvents( arrayDesign, eventClass );
List<AuditEvent> allEvents = ( List<AuditEvent> ) arrayDesign.getAuditTrail().getEvents();
List<AuditEvent> allEvents = arrayDesign.getAuditTrail().getEvents();

if ( eventsOfCurrentType.size() == 0 ) {
if ( eventsOfCurrentType.isEmpty() ) {
// it's never been run.
return true;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ protected void doWork() throws Exception {
log.info( " Value: " + value );

for ( FactorValueBasicValueObject fv : categoryMap.get( category ).get( value ) ) {
if ( fv.getMeasurement() != null ) continue; // don't list individual values.
if ( fv.getMeasurementObject() != null ) continue; // don't list individual values.

Long factor = fv.getId();
ExpressionExperimentValueObject expressionExperimentValueObject = ed2ee.get( factor2Design
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,8 +171,7 @@ protected void processOptions( CommandLine commandLine ) throws ParseException {
try {
this.pubmedIds = parsePubmedIdFile( this.pubmedIdFilename );
} catch ( IOException e ) {
log.error( e.getMessage() );
e.printStackTrace();
log.error( "Failed to parse PubMed ID file: " + this.pubmedIdFilename + ".", e );
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,19 +6,21 @@
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.task.AsyncTaskExecutor;
import ubic.basecode.ontology.model.OntologyTerm;
import ubic.gemma.core.ontology.OntologyService;
import ubic.gemma.core.util.AbstractAuthenticatedCLI;
import ubic.gemma.core.util.AbstractCLI;
import ubic.gemma.model.common.description.Characteristic;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

public class FindObsoleteTermsCli extends AbstractCLI {
public class FindObsoleteTermsCli extends AbstractAuthenticatedCLI {

@Autowired
private OntologyService ontologyService;
Expand Down Expand Up @@ -90,13 +92,13 @@ protected void doWork() throws Exception {

log.info( "Ontologies warmed up, starting check..." );

Map<Characteristic, Long> vos = ontologyService.findObsoleteTermUsage();
Map<OntologyTerm, Long> vos = ontologyService.findObsoleteTermUsage( 4, TimeUnit.HOURS );

AbstractCLI.log.info( "Obsolete term check finished, printing ..." );

System.out.println( "Value\tValueUri\tCount" );
for ( Map.Entry<Characteristic, Long> vo : vos.entrySet() ) {
System.out.println( vo.getKey().getValue() + "\t" + vo.getKey().getValueUri() + "\t" + vo.getValue() );
for ( Map.Entry<OntologyTerm, Long> vo : vos.entrySet() ) {
System.out.println( vo.getKey().getLabel() + "\t" + vo.getKey().getUri() + "\t" + vo.getValue() );
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
package ubic.gemma.core.apps;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.task.AsyncTaskExecutor;
import ubic.basecode.ontology.model.OntologyTerm;
import ubic.gemma.core.ontology.OntologyService;
import ubic.gemma.core.util.AbstractAuthenticatedCLI;
import ubic.gemma.core.util.CLI;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * CLI that finds ontology terms whose stored label in Gemma no longer matches the label in the
 * source ontology and, unless running in dry-run mode, corrects them via
 * {@code OntologyService#fixOntologyTermLabels}.
 * <p>
 * All configured ontologies are warmed up in parallel (with search and inference disabled, since
 * only term lookup is needed) before the check is delegated to the service. Results are printed
 * to standard output as TSV.
 */
public class FixOntologyTermLabelsCli extends AbstractAuthenticatedCLI {

    /** When true, only report mismatched labels; do not update the database. */
    private boolean dryRun = false;

    @Autowired
    private OntologyService ontologyService;

    /** Dedicated executor so ontology warm-up can proceed in parallel. */
    @Autowired
    @Qualifier("ontologyTaskExecutor")
    private AsyncTaskExecutor ontologyTaskExecutor;

    /** Mirrors the load.ontologies setting; auto-loading must be off for this CLI (see doWork). */
    @Value("${load.ontologies}")
    private boolean autoLoadOntologies;

    /** All ontology providers known to the context; each is initialized explicitly in doWork. */
    @Autowired
    private List<ubic.basecode.ontology.providers.OntologyService> ontologies;

    @Override
    public GemmaCLI.CommandGroup getCommandGroup() {
        return GemmaCLI.CommandGroup.METADATA;
    }

    @Override
    public String getCommandName() {
        return "fixOntologyTermLabels";
    }

    @Override
    public String getShortDesc() {
        return "Check and correct characteristics & statements using the wrong label for an ontology term";
    }

    @Override
    protected void buildOptions( Options options ) {
        options.addOption( "d", "dryRun", false, "Dry run, do not update the database [default: " + dryRun + "]" );
    }

    @Override
    protected void processOptions( CommandLine commandLine ) {
        // direct assignment is equivalent to the conditional set since dryRun defaults to false
        dryRun = commandLine.hasOption( 'd' );
    }

    @Override
    protected void doWork() throws Exception {
        // This CLI performs its own explicit, feature-stripped initialization below; refuse to
        // run if the container would also auto-load ontologies (duplicate, slower loading).
        if ( autoLoadOntologies ) {
            throw new IllegalArgumentException( "Auto-loading of ontologies is enabled, disable it by setting load.ontologies=false in Gemma.properties." );
        }

        log.info( String.format( "Warming up %d ontologies ...", ontologies.size() ) );
        CompletionService<ubic.basecode.ontology.providers.OntologyService> completionService = new ExecutorCompletionService<>( ontologyTaskExecutor );
        Map<ubic.basecode.ontology.providers.OntologyService, Future<ubic.basecode.ontology.providers.OntologyService>> futures = new LinkedHashMap<>();
        for ( ubic.basecode.ontology.providers.OntologyService ontology : ontologies ) {
            futures.put( ontology, completionService.submit( () -> {
                // we don't need all those features
                ontology.setSearchEnabled( false );
                ontology.setInferenceMode( ubic.basecode.ontology.providers.OntologyService.InferenceMode.NONE );
                ontology.initialize( true, false );
                return ontology;
            } ) );
        }

        // Wait for each warm-up to finish, logging progress; near the end, list which
        // ontologies are still pending so slow loaders are easy to spot.
        for ( int i = 0; i < ontologies.size(); i++ ) {
            ubic.basecode.ontology.providers.OntologyService os = completionService.take().get();
            log.info( String.format( " === Ontology (%d/%d) warmed up: %s", i + 1, ontologies.size(), os ) );
            int remainingToLoad = ontologies.size() - ( i + 1 );
            if ( remainingToLoad > 0 && remainingToLoad <= 5 ) {
                log.info( "Still loading:\n\t" + futures.entrySet().stream().filter( e -> !e.getValue().isDone() )
                        .map( Map.Entry::getKey )
                        .map( ubic.basecode.ontology.providers.OntologyService::toString )
                        .collect( Collectors.joining( "\n\t" ) ) );
            }
        }

        log.info( "Ontologies warmed up, starting check..." );

        // Keys are the wrong (stored) labels; values are the terms carrying the correct label.
        Map<String, OntologyTerm> mismatches = ontologyService.fixOntologyTermLabels( dryRun, 4, TimeUnit.HOURS );

        // Emit as TSV on stdout so the report can be piped into further tooling.
        System.out.println("Wrong_label\tCorrect_Label\tURI");
        for ( Map.Entry<String, OntologyTerm> entry : mismatches.entrySet() ) {
            System.out.println( entry.getKey() + "\t" + entry.getValue().getLabel() + "\t" + entry.getValue().getUri());
        }
    }
}
Loading

0 comments on commit 83f4594

Please sign in to comment.