Merge branch 'develop' into pva/EVSRESTAPI-557-postman-content-qa
peter-va committed Feb 19, 2025
2 parents 98a469b + e6d86f0 commit ab02e8e
Showing 10 changed files with 399 additions and 434 deletions.
16 changes: 15 additions & 1 deletion CHANGELOG.md
@@ -4,7 +4,21 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).

## [1.10.0.RELEASE] - 2024-06-18
## [2.0.0.RELEASE] - 2025-01-28
### Added
- FHIR R5 terminology endpoints for CodeSystem, ValueSet, and ConceptMap
- Vulnerability scanning
- Endpoint for URL mapping from the terms browser to EVS Explore
- Sending email as part of term form handling
### Changed
- Upgraded backend to Spring Boot 3 (and Java 17); upgraded tests to JUnit 5
- FHIR R4 improvements and alignment with spec
- More consistent error handling
- Fixes to Swagger
- Improvements to SPARQL query handling
- Improvements to backend handling of "mappings" to use a separate index

## [1.10.0.RELEASE] - 2024-07-22
### Added
- FHIR R4 terminology endpoints for CodeSystem, ValueSet, and ConceptMap
- Additional search endpoints to support SPARQL querying (of RDF-loaded terminologies)
23 changes: 23 additions & 0 deletions JENA.md
@@ -0,0 +1,23 @@
# EVSRESTAPI - JENA SETUP

Information on using Apache Jena/Fuseki with EVSRESTAPI.

## Build and run a local docker image

Run the following commands to build the Fuseki image:

```bash
cd docker/fuseki
docker build -t evsrestapi/fuseki:5.1.0 .
```

### Running Jena Locally (after data is loaded)

Start the container with the following command.
Note: set the source path of the volume mount (`$dir` below) to a local directory of your choice so the data persists.

```bash
dir=c:/Users/carlsenbr/eclipse-workspace/data/fuseki
docker run -d --name=jena_evs --rm -p "3030:3030" -v"$dir":/opt/fuseki/run/databases evsrestapi/fuseki:5.1.0
```
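
To confirm the container came up and the admin endpoints are reachable, a quick check like the one below should work. This is only a sketch: it assumes the default `3030:3030` port mapping from the command above and the anonymous-access rules in the bundled `shiro.ini`.

```bash
# Ping the server (left open to anonymous access in shiro.ini)
curl -s "http://localhost:3030/\$/ping"

# List the datasets that exist so far
curl -s "http://localhost:3030/\$/datasets"
```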

33 changes: 33 additions & 0 deletions docker/fuseki/Dockerfile
@@ -0,0 +1,33 @@
FROM bellsoft/liberica-openjre-alpine:17.0.13

# Set environment variables
ENV FUSEKI_VERSION=5.1.0
ENV FUSEKI_HOME=/opt/fuseki
ENV FUSEKI_BASE=/opt/fuseki/run

# Install necessary packages
RUN apk add --no-cache curl unzip bash

# Create directories
RUN mkdir -p ${FUSEKI_HOME} ${FUSEKI_BASE}

# Download and unzip Fuseki
RUN curl -L -o /tmp/apache-jena-fuseki-${FUSEKI_VERSION}.zip \
https://archive.apache.org/dist/jena/binaries/apache-jena-fuseki-${FUSEKI_VERSION}.zip && \
unzip /tmp/apache-jena-fuseki-${FUSEKI_VERSION}.zip -d /tmp && \
mv /tmp/apache-jena-fuseki-${FUSEKI_VERSION}/* ${FUSEKI_HOME} && \
rm /tmp/apache-jena-fuseki-${FUSEKI_VERSION}.zip

# Expose the default Fuseki port
EXPOSE 3030

# Set the working directory
WORKDIR ${FUSEKI_HOME}
COPY ./shiro.ini ${FUSEKI_HOME}/run/shiro.ini
# Start Fuseki
#CMD ["./fuseki-server", "--update", "--loc=/opt/fuseki/run/databases","/NCIT2"]
#CMD ["./fuseki", "start"]
#CMD ["./fuseki-server", "--update"]
COPY entrypoint.sh /opt/fuseki/entrypoint.sh
RUN chmod +x /opt/fuseki/entrypoint.sh
ENTRYPOINT ["/opt/fuseki/entrypoint.sh"]
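
Since the image's runtime behavior is driven by the entrypoint script copied above, a throwaway shell session can be handy when debugging the image. A sketch, assuming the tag used in JENA.md:

```bash
# Open an interactive shell instead of starting Fuseki (bash is installed in the image)
docker run --rm -it --entrypoint /bin/bash evsrestapi/fuseki:5.1.0
```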
16 changes: 16 additions & 0 deletions docker/fuseki/entrypoint.sh
@@ -0,0 +1,16 @@
#!/bin/bash

# Start the fuseki-server
./fuseki start

# Wait for the server to start
sleep 10

# Create the NCIT2 dataset
curl -s -g -X POST -d "dbName=NCIT2&dbType=tdb2" "http://localhost:3030/\$/datasets"

# Create the CTRP dataset
curl -s -g -X POST -d "dbName=CTRP&dbType=tdb2" "http://localhost:3030/\$/datasets"

# Keep the container running
tail -f /dev/null
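
The entrypoint above creates the two TDB2 datasets through Fuseki's HTTP administration API and then keeps the container alive. Once it has run, something like the following can confirm the datasets exist and are queryable; this is a hedged sketch that assumes the stock query service Fuseki attaches to datasets created through the admin API.

```bash
# Confirm the NCIT2 and CTRP datasets were created by the entrypoint
curl -s "http://localhost:3030/\$/datasets"

# Run a trivial SPARQL query against NCIT2 to count triples
curl -s --data-urlencode "query=SELECT (COUNT(*) AS ?triples) WHERE { ?s ?p ?o }" \
  "http://localhost:3030/NCIT2/sparql"
```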
39 changes: 39 additions & 0 deletions docker/fuseki/shiro.ini
@@ -0,0 +1,39 @@
# Licensed under the terms of http://www.apache.org/licenses/LICENSE-2.0

[main]
# Development
ssl.enabled = false

plainMatcher=org.apache.shiro.authc.credential.SimpleCredentialsMatcher
#iniRealm=org.apache.shiro.realm.text.IniRealm
iniRealm.credentialsMatcher = $plainMatcher

localhostFilter=org.apache.jena.fuseki.authz.LocalhostFilter

[users]
# Implicitly adds "iniRealm = org.apache.shiro.realm.text.IniRealm"
admin=pw

[roles]

[urls]
## Control functions open to anyone
/$/status = anon
/$/server = anon
/$/ping = anon
/$/metrics = anon

## and the rest are restricted to localhost.
#/$/** = localhostFilter

## If you want simple, basic authentication user/password
## on the operations,
## 1 - set a better password in [users] above.
## 2 - comment out the "/$/** = localhost" line and use:
## "/$/** = authcBasic,user[admin]"

## or to allow any access.
##/$/** = anon

# Everything else
/**=anon
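
As the comments above note, this configuration is wide open for development. As a sketch of the client side if the "/$/** = authcBasic,user[admin]" rule were enabled instead, admin calls would then require the credentials from the [users] section (here the placeholder admin/pw pair):

```bash
# Without credentials an admin call should now be rejected (expect 401)
curl -s -o /dev/null -w "%{http_code}\n" "http://localhost:3030/\$/datasets"

# With the admin user from [users] the same call should succeed
curl -s -u admin:pw "http://localhost:3030/\$/datasets"
```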
121 changes: 107 additions & 14 deletions src/main/bin/devreset-remote.sh → src/main/bin/devreset-legacy.sh
@@ -6,7 +6,7 @@
# directory is mounted as /data within the stardog container. Thus, while in
# the stardog container the path /data/UnitTestData must be available.
#
# It resets the stardog data on the remote server. Make sure to get
# It resets the stardog and opensearch data sets locally to update to
# the latest dev testing data set at that google drive URL.
#
help=0
@@ -16,8 +16,8 @@ while [[ "$#" -gt 0 ]]; do case $1 in
esac; shift; done

if [ $help == 1 ] || [ ${#arr[@]} -ne 1 ]; then
echo "Usage: src/main/bin/devreset-remote.sh \"c:/data/UnitTestData\""
echo " e.g. src/main/bin/devreset-remote.sh ../data/UnitTestData"
echo "Usage: src/main/bin/devreset.sh \"c:/data/UnitTestData\""
echo " e.g. src/main/bin/devreset.sh ../data/UnitTestData"
exit 1
fi
dir=${arr[0]}
@@ -50,11 +50,33 @@ elif [[ -z $STARDOG_USERNAME ]]; then
elif [[ -z $STARDOG_PASSWORD ]]; then
echo "ERROR: STARDOG_PASSWORD is not set"
exit 1
elif [[ -z $ES_SCHEME ]]; then
echo "ERROR: ES_SCHEME is not set"
exit 1
elif [[ -z $ES_HOST ]]; then
echo "ERROR: ES_HOST is not set"
exit 1
elif [[ -z $ES_PORT ]]; then
echo "ERROR: ES_PORT is not set"
exit 1
fi

# Prerequisites - check the UnitTestData
echo " Check prerequisites"

# Check that reindex.sh is at src/main/bin
if [[ ! -e "src/main/bin/reindex.sh" ]]; then
echo "ERROR: src/main/bin/reindex.sh does not exist, run from top-level evsrestapi directory"
exit 1
fi

# Check NCIM
echo " check NCIM"
ct=`ls $dir/NCIM | grep RRF | wc -l`
if [[ $ct -le 20 ]]; then
echo "ERROR: unexpectedly small number of NCIM/*RRF files = $ct"
exit 1
fi
# Check NCIt weekly
echo " check NCIt weekly"
if [[ ! -e "$dir/ThesaurusInferred_+1weekly.owl" ]]; then
@@ -118,16 +140,16 @@ fi

# Verify docker stardog is running
echo " verify docker stardog is running"
ct=`sudo docker ps | grep 'stardog/stardog' | wc -l`
ct=`docker ps | grep 'stardog/stardog' | wc -l`
if [[ $ct -ne 1 ]]; then
echo " ERROR: stardog docker is not running"
exit 1
fi

# Verify docker stardog has a volume mounted that contains UnitTestData
echo " verify docker stardog has /data/UnitTestData mounted"
pid=`sudo docker ps | grep stardog/stardog | cut -f 1 -d\ `
datadir=`sudo docker inspect -f '{{ .Mounts }}' $pid | perl -ne '/.*bind\s+([^\s]+)\s+\/data\s+.*/; print $1' | perl -pe 's/.*\/(host_mnt|mnt\/host)\/([cde])/$2:\//'`
pid=`docker ps | grep stardog/stardog | cut -f 1 -d\ `
datadir=`docker inspect -f '{{ .Mounts }}' $pid | perl -ne '/.*bind\s+([^\s]+)\s+\/data\s+.*/; print $1' | perl -pe 's/.*\/(host_mnt|mnt\/host)\/([cde])/$2:\//'`
if [[ -z "$datadir" ]]; then
echo "ERROR: unable to determine volume mounted to /data in docker $pid"
exit 1
@@ -137,6 +159,22 @@ if [[ ! -e "$datadir/UnitTestData" ]]; then
exit 1
fi

# Verify docker opensearch is running
echo " verify docker opensearch is running"
ct=`docker ps | grep 'opensearchproject/opensearch' | wc -l`
if [[ $ct -lt 1 ]]; then
echo " ERROR: opensearch docker is not running"
exit 1
fi

# Verify docker opensearch can be reached
echo " verify docker opensearch can be reached at $ES_SCHEME://$ES_HOST:$ES_PORT"
curl -s "$ES_SCHEME://$ES_HOST:$ES_PORT/_cat/indices" >> /dev/null
if [[ $? -ne 0 ]]; then
echo "ERROR: problem connecting to docker opensearch"
exit 1
fi

# Verify stardog container can run a script
echo " verify docker stardog can run a script"
/bin/rm -f $dir/x.txt
@@ -146,11 +184,11 @@ ls /data/UnitTestData > //data/UnitTestData/x.txt
EOF
chmod 755 $dir/x.sh
chmod ag+rwx $dir
pid=`sudo docker ps | grep stardog/stardog | cut -f 1 -d\ `
pid=`docker ps | grep stardog/stardog | cut -f 1 -d\ `
# note: //data is required for gitbash
sudo docker exec $pid //data/UnitTestData/x.sh
docker exec $pid //data/UnitTestData/x.sh
if [[ $? -ne 0 ]]; then
echo "ERROR: problem connecting to docker elasticsearch"
echo "ERROR: problem connecting to docker opensearch"
exit 1
fi
ct=`grep -c owl $dir/x.txt`
@@ -160,6 +198,45 @@ if [[ $ct -eq 0 ]]; then
fi
/bin/rm -f $dir/x.txt



# Remove opensearch indexes
echo " Remove opensearch indexes"
curl -s "$ES_SCHEME://$ES_HOST:$ES_PORT/_cat/indices" | cut -d\ -f 3 | egrep "metrics|concept|evs" | cat > /tmp/x.$$.txt
if [[ $? -ne 0 ]]; then
echo "ERROR: problem connecting to docker opensearch"
exit 1
fi
for i in `cat /tmp/x.$$.txt`; do
echo " remove $i"
curl -s -X DELETE "$ES_SCHEME://$ES_HOST:$ES_PORT/$i" >> /dev/null
if [[ $? -ne 0 ]]; then
echo "ERROR: problem removing opensearch index $i"
exit 1
fi
done

# Reindex ncim - individual terminologies
for t in MDR ICD10CM ICD9CM LNC SNOMEDCT_US RADLEX PDQ ICD10 HL7V3.0; do
# Keep the NCIM folder around while we run
echo "Load $t (from downloaded data)"
src/main/bin/ncim-part.sh --noconfig $dir/NCIM --keep --terminology $t > /tmp/x.$$.txt 2>&1
if [[ $? -ne 0 ]]; then
cat /tmp/x.$$.txt | sed 's/^/ /'
echo "ERROR: loading $t"
exit 1
fi
done

# Reindex ncim - must run after the prior section so that maps can connect to loaded terminologies
echo " Reindex ncim"
src/main/bin/ncim-part.sh --noconfig $dir/NCIM > /tmp/x.$$.txt 2>&1
if [[ $? -ne 0 ]]; then
cat /tmp/x.$$.txt | sed 's/^/ /'
echo "ERROR: problem running ncim-part.sh"
exit 1
fi

# Clean and load stardog
echo " Remove stardog databases and load monthly/weekly"
# TODO: if the following fails, there's nothing to catch it
@@ -183,21 +260,27 @@ echo " load data"
/opt/stardog/bin/stardog data add --named-graph http://UmlsSemNet NCIT2 /data/UnitTestData/UmlsSemNet/umlssemnet.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://MEDRT NCIT2 /data/UnitTestData/MED-RT/medrt.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://Canmed NCIT2 /data/UnitTestData/Canmed/canmed.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://Ctcae5 NCIT2 /data/UnitTestData/Ctcae5/ctcae5.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://CTCAE NCIT2 /data/UnitTestData/CTCAE/ctcae5.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://DUO_monthly NCIT2 /data/UnitTestData/DUO/duo_Feb21.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://DUO_monthly NCIT2 /data/UnitTestData/DUO/iao_Dec20.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://OBI_monthly NCIT2 /data/UnitTestData/OBI/obi_2022_07.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://OBIB NCIT2 /data/UnitTestData/OBIB/obib_2021-11.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://NDFRT2 NCIT2 /data/UnitTestData/NDFRT/NDFRT_Public_2018.02.05_Inferred.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://MGED NCIT2 /data/UnitTestData/MGED/MGEDOntology.fix.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://NPO NCIT2 /data/UnitTestData/NPO/npo-2011-12-08_inferred.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://MA NCIT2 /data/UnitTestData/Mouse_Anatomy/ma_07_27_2016.owl | sed 's/^/ /'
/opt/stardog/bin/stardog data add --named-graph http://Zebrafish NCIT2 /data/UnitTestData/Zebrafish/zfa_2019_08_02.owl | sed 's/^/ /'
echo " optimize databases"
/opt/stardog/bin/stardog-admin db optimize -n CTRP | sed 's/^/ /'
/opt/stardog/bin/stardog-admin db optimize -n NCIT2 | sed 's/^/ /'
# The -n parameter was removed before the DB name as per updated stardog (may need to re-pull the latest)
/opt/stardog/bin/stardog-admin db optimize CTRP | sed 's/^/ /'
/opt/stardog/bin/stardog-admin db optimize NCIT2 | sed 's/^/ /'
EOF
chmod 755 $dir/x.sh
chmod ag+rwx $dir
pid=`sudo docker ps | grep stardog/stardog | cut -f 1 -d\ `
pid=`docker ps | grep stardog/stardog | cut -f 1 -d\ `
# note: //data is required for gitbash
sudo docker exec $pid //data/UnitTestData/x.sh
docker exec $pid //data/UnitTestData/x.sh
if [[ $? -ne 0 ]]; then
echo "ERROR: problem loading stardog"
exit 1
@@ -207,6 +290,16 @@ fi
# Hardcode the history file
historyFile=$dir/cumulative_history_21.06e.txt

# Reindex stardog terminologies
echo " Reindex stardog terminologies"
# After this point, the log is stored in the tmp folder unless an error is hit
src/main/bin/reindex.sh --noconfig --history $historyFile > /tmp/x.$$.txt 2>&1
if [[ $? -ne 0 ]]; then
cat /tmp/x.$$.txt | sed 's/^/ /'
echo "ERROR: problem running reindex.sh script"
exit 1
fi

# Cleanup
/bin/rm -f /tmp/x.$$.txt $dir/x.{sh,txt}
