diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 84f575ff1..e7a4a106b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,30 +9,117 @@ on: - '**/**.md' jobs: run-parse-server-ci: - name: CI of Parse Server ${{ matrix.version }} - timeout-minutes: 15 + name: CI of Parse Server + timeout-minutes: 60 runs-on: ubuntu-latest strategy: matrix: - version: ['5.6.0', '6.3.1'] + version: ['7.0.0'] steps: - name: Checkout parse-server ${{ matrix.version }} uses: actions/checkout@v2 with: repository: parse-community/parse-server - ref: ${{ matrix.version }} + ref: 7.0.0 path: parse-server - # - name: Run CI workflow of Parse Server - # uses: actions/github-script@v6 - # with: - # script: | - # const fs = require('fs'); - # const yaml = require('js-yaml'); - # const path = './parse-server/.github/workflows/ci.yml'; - # const ciConfig = yaml.load(fs.readFileSync(path, 'utf8')); - # console.log(ciConfig); - # return ciConfig; - + - name: Create Oracle Storage Adapter Dir + run: mkdir parse-server/src/Adapters/Storage/Oracle; + ls -la parse-server/src/Adapters/Storage/Oracle; + - name: Checkout Oracle Storage Adapter + uses: actions/checkout@v2 + with: + repository: oracle-samples/oracleadapter-parse + ref: '' + path: parse-server/src/Adapters/Storage/Oracle + - name: Validate Oracle Dir + run: echo $PWD; + ls -la parse-server/src/Adapters/Storage; + ls -la parse-server/src/Adapters/Storage/Oracle; + cd parse-server/src/Adapters/Storage/Oracle; + rm -rf .git; + - name: Install Sqlcl + run: wget https://download.oracle.com/otn_software/java/sqldeveloper/sqlcl-23.3.0.270.1251.zip; + unzip sqlcl-23.3.0.270.1251.zip; + ./sqlcl/bin/sql -V; + - name: Install Instant Client + run: uname -m; + wget https://download.oracle.com/otn_software/linux/instantclient/1923000/instantclient-basic-linux.x64-19.23.0.0.0dbru.zip; + unzip instantclient-basic-linux.x64-19.23.0.0.0dbru.zip; + ls -la; + ls -la $PWD/instantclient_19_23; + - name: Install libaio1 + run: sudo apt-get install libaio1; + - name: Checkout templateSpecs + uses: actions/checkout@v2 + with: + repository: ddrechse/privateAdapterTemplate + ref: '' + path: privateAdapterTemplate + - name: Move package.json, OracleStorageAdapter and OracleCollection + run: ls; + echo $PWD; + ls -la $PWD; + cp privateAdapterTemplate/package.json parse-server; + rm -rf parse-server/spec; + mv privateAdapterTemplate/spec parse-server; + cat parse-server/spec/helper.js + - name: Run Free23c + run: docker run --name free23c -d -p 1521:1521 -e ORACLE_PWD=Welcome12345 container-registry.oracle.com/database/free:latest; + docker ps -a; + docker images; + - name: Wait for container + run: sleep 120; + docker ps -a; + ./sqlcl/bin/sql -V; + - name: Enable SODA in image + run: echo "alter session set container=FREEPDB1;grant db_developer_role to pdbadmin;grant soda_app to pdbadmin;GRANT UNLIMITED TABLESPACE TO pdbadmin;quit;" > soda; + cat soda; + ./sqlcl/bin/sql sys/Welcome12345@localhost:1521/free as sysdba @./soda + - name: Run Tests + run: echo $PWD; + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | bash; + export NVM_DIR="$HOME/.nvm"; + [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh"; + nvm install v20.12.0; + nvm list; + node --version; + echo "Before setting LIB PATH"; + export LD_LIBRARY_PATH=${PWD}/instantclient_19_23; + echo "This is LD LIBRARY PATH"; + echo $LD_LIBRARY_PATH; + echo "Again"; + cd parse-server; + cat package.json; + npm install; + npm install --save @parse/fs-files-adapter; + npm install oracledb@6.4.0; + npm ci; + export ORACLEDB_VERSION=19; + export PARSE_SERVER_DATABASE_ADAPTER='{"module":"../Adapters/Storage/Oracle/OracleStorageAdapter","options":{"databaseURI":"oracledb://pdbadmin:Welcome12345@localhost:1521/freepdb1","collectionPrefix":"test_"}}'; + echo $PARSE_SERVER_DATABASE_ADAPTER; + ORACLE_CLIENT_LOCATION=${LD_LIBRARY_PATH} npm run testoracle; +# mkdir oralogs; +# docker cp free23c: { + logger.verbose('getCollectionConnection about to get connection from pool '); + logger.verbose(p); + logger.verbose(' statistics: ' + JSON.stringify(p.getStatistics())); + return p.getConnection(); + }) + .then(conn => { + logger.verbose('getCollectionConnection about to get SodaDB'); + localConn = conn; + return conn.getSodaDatabase(); + }) + .then(sodadb => { + logger.verbose('getCollectionConnection open collection for ' + this._name); + this._oracleSodaDB = sodadb; + return sodadb.openCollection(this._name); + }) + .then(async coll => { + if (!coll) { + logger.verbose('getCollectionConnection create NEW collection for ' + this._name); + const newCollection = await this._oracleSodaDB.createCollection(this._name, { + metaData: mymetadata, + }); + + +// const newCollection = await this._oracleSodaDB.createCollection(this._name); + /* + Create index on _id for every new collection + This imitates Mongo behavior which happens automatically + + Index names MUST be unique in a schema, append table name + cannot have two indexes with the same name in a single schema. + */ + if (!this.idIndexCreating) { + this.idIndexCreating = true; + const indexName = 'ididx' + this._name; + const indexSpec = { name: indexName, unique: true, fields: [{ path: '_id' }] }; + await newCollection.createIndex(indexSpec); + logger.verbose( + 'getCollectionConnection successfully create _id index for ' + this._name + ); + // Add _id if it doesn't exist to indexes array + const found = this.indexes.find(item => { + return Object.keys(item)[0] === '_id_'; + }); + if (typeof found === 'undefined') { + this.indexes.push({ _id_: { _id: 1 } }); + } + } + return newCollection; + } + return coll; + }) + .catch(error => { + logger.error('getCollectionConnection ERROR: ' + error); + throw error; + }); + logger.verbose( + 'getCollectionConnection returning collection for ' + + this._name + + ' returned ' + + this._oracleCollection + ); + return localConn; + } + + // Atomically updates data in the database for a single (first) object that matched the query + // If there is nothing that matches the query - does insert + // Postgres Note: `INSERT ... ON CONFLICT UPDATE` that is available since 9.5. 
+  async upsertOne(query, update, session) {
+    /*
+      UpsertOne is of the form
+      where query =
+       {"_id": "HasAllPOD"}
+      and update = the new document
+       {"_id": "HasAllPOD", "numPODs": 17}
+
+      in this case, if the update fails because no document existed, then
+      rerunning the query would return 0 and indicate an insert
+    */
+
+    logger.verbose('in upsertOne query = ' + JSON.stringify(query));
+    logger.verbose('use session to make linter happy ' + JSON.stringify(session));
+    // TODO need to use save(), which is the SODA equivalent of upsert() and it takes a SodaDocument
+    let docs;
+    let promise;
+
+    try {
+      promise = await this.findOneAndUpdate(query, update, null);
+      logger.verbose('Upsert Promise = ' + promise);
+      if (promise === false) {
+        logger.verbose('Upsert Insert for query ' + JSON.stringify(query));
+        promise = await this._rawFind(query, { type: 'sodadocs' }).then(d => (docs = d));
+        if (docs && docs.length == 0) {
+          // It's an insert, so merge query into update
+          _.merge(update, query);
+          promise = await this.insertOne(update);
+        }
+      }
+      return promise;
+    } catch (error) {
+      logger.error('Collection UpsertOne throws ' + error);
+      throw error;
+    }
+  }
+
+  async findOneAndUpdate(query, update, transactionalSession) {
+    try {
+      logger.verbose('in Collection findOneAndUpdate query = ' + JSON.stringify(query));
+      logger.verbose(
+        'use transactionalSession to make linter happy ' + JSON.stringify(transactionalSession)
+      );
+
+      // TODO: Fix updatedAt, it should be _updatedAt because it is an internal field
+      // and updatedAt doesn't get updated for Schemas
+
+      let updateObj;
+
+      let result = await this._rawFind(query, { type: 'one' }).then(result => {
+        return result;
+      });
+      //************************************************************************************************/
+      // Modify Update based on Mongo operators
+      //
+      // Look for $unset, Mongo's deleteField
+      // Create array of fieldNames to be deleted
+      const newUpdate = new Object();
+      const fieldNames = new Array();
+      Object.keys(update).forEach(item => {
+        if (item === '$unset') {
+          Object.keys(update[item]).forEach(item => {
+            fieldNames.push(item);
+          });
+        } else {
+          if (item === '_updated_at') {
+            newUpdate['updatedAt'] = update[item];
+          } else {
+            newUpdate[item] = update[item];
+          }
+        }
+      });
+
+      // if fieldNames is non-empty, delete those fields and
+      // replace update with newUpdate, which has the $unset pairs removed
+      // Don't move deleteFields to update transform code
+      if (fieldNames.length > 0) {
+        await this.deleteFields(fieldNames).then(result => {
+          update = newUpdate;
+          return result;
+        });
+        // The key values changed, so fetch them again
+        result = await this._rawFind(query, { type: 'one' }).then(result => {
+          return result;
+        });
+      }
+
+      // Process Increments $inc
+      const newIncUpdate = new Object();
+      let incUpdt = false;
+      Object.keys(update).forEach(item => {
+        if (item === '$inc') {
+          Object.keys(update[item]).forEach(it2 => {
+            incUpdt = true;
+            _.set(result.content, it2, _.result(result.content, it2) + update[item][it2]);
+          });
+        } else {
+          if (item === '_updated_at') {
+            newIncUpdate['updatedAt'] = update[item];
+          } else {
+            newIncUpdate[item] = update[item];
+          }
+        }
+      });
+
+      if (incUpdt) {
+        update = newIncUpdate;
+      }
+
+      // Process the $addToSet operator: it adds a value to an array unless the value is already present, in which case $addToSet does nothing to that array.
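+      // For example (hypothetical update document):
+      //   { $addToSet: { tags: { $each: ['a', 'b'] } } }
+      // pushes 'a' and 'b' onto result.content.tags only if they are not
+      // already members; note that only the $each form is handled below.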
+ const newAddToSetUpdate = new Object(); + let addToSetUpdt = false; + Object.keys(update).forEach(item => { + if (item === '$addToSet') { + Object.keys(update[item]).forEach(it2 => { + Object.keys(update[item][it2]).forEach(it3 => { + if (it3 === '$each') { + const updtArray = update[item][it2][it3]; + // Check for dot notation + const temp = it2.split('.'); + let newArray; + if (temp.length > 1) { + newArray = result.content[temp[0]][temp[1]]; + } else { + newArray = result.content[it2]; + } + updtArray.forEach(updt => { + if (typeof updt === 'object') { + if (!newArray.some(entry => Object.keys(entry)[0] === Object.keys(updt)[0])) { + addToSetUpdt = true; + newArray.push(updt); + } + } else { + if (!newArray.includes(updt)) { + addToSetUpdt = true; + newArray.push(updt); + } + } + }); + } + }); + }); + } else { + if (item === '_updated_at') { + newAddToSetUpdate['updatedAt'] = update[item]; + } else { + newAddToSetUpdate[item] = update[item]; + } + } + }); + + if (addToSetUpdt) { + update = newAddToSetUpdate; + } + + // Process $pullAll operator removes all instances of the specified values from an existing array. + const newPullAllUpdate = new Object(); + let pullAllUpdt = false; + Object.keys(update).forEach(item => { + if (item === '$pullAll') { + Object.keys(update[item]).forEach(it2 => { + const updtArray = update[item][it2]; + const rsltArray = result.content[it2]; + const newArray = new Array(); + updtArray.forEach(updt => { + if (typeof updt === 'object') { + rsltArray.forEach(entry => { + if (Object.keys(entry)[0] != Object.keys(updt)[0]) { + newArray.push(entry); + pullAllUpdt = true; + } + }); + } + newPullAllUpdate[it2] = newArray; + }); + }); + } else { + if (item === '_updated_at') { + newPullAllUpdate['updatedAt'] = update[item]; + } else { + newPullAllUpdate[item] = update[item]; + } + } + }); + + if (pullAllUpdt) { + update = newPullAllUpdate; + } + + // End of Transform Update + //************************************************************************************************/ + + if (result && Object.keys(result).length > 0) { + // found the doc, so we need to update it + const key = result.key; + logger.verbose('key = ' + key); + const version = result.version; + logger.verbose('version = ' + version); + const oldContent = result.content; + + logger.verbose('oldContent = ' + JSON.stringify(oldContent)); + logger.verbose('update = ' + JSON.stringify(update)); + + // Check for empty object and remove it from original, no merging, replacing + Object.keys(update).forEach(item => { + if ( + typeof update[item] === 'object' && + update[item] !== null && + item !== 'updatedAt' && + Object.keys(update[item]).length === 0 + ) { + _.unset(oldContent, item); + } + }); + + if (update.fieldName) { + const theUpdate = { [update.fieldName]: update.theFieldType }; + logger.verbose('theUpdate = ' + JSON.stringify(theUpdate)); + updateObj = { ...oldContent, ...theUpdate }; + } else { + if (pullAllUpdt || update['_metadata']) { + // Handle set or merge for _metadata in Schema + Object.keys(update).forEach(item => { + const found = Object.keys(oldContent).find(item => { + return item === '_metadata'; + }); + if (item === '_metadata') { + if (found) { + if ( + Object.prototype.hasOwnProperty.call(oldContent[item], 'class_permissions') && + Object.prototype.hasOwnProperty.call(update[item], 'class_permissions') + ) { + // Just reset class_permissions to update + _.set(oldContent[item], 'class_permissions', update[item]['class_permissions']); + } else { + _.merge(oldContent['_metadata'], 
update[item]);
+            }
+          } else {
+            _.set(oldContent, item, update[item]);
+          }
+        } else {
+          _.set(oldContent, item, update[item]);
+        }
+      });
+      updateObj = oldContent;
+    } else {
+      updateObj = _.merge(oldContent, update);
+    }
+  }
+      logger.verbose('Updated Object = ' + JSON.stringify(updateObj));
+      let localConn = null;
+      return this.getCollectionConnection()
+        .then(conn => {
+          localConn = conn;
+          return this._oracleCollection.find().key(key).version(version).replaceOne(updateObj);
+        })
+        .then(result => {
+          if (result.replaced == true) {
+            return updateObj;
+          } else {
+            return 'retry';
+          }
+        })
+        .finally(async () => {
+          if (localConn) {
+            await localConn.close();
+            localConn = null;
+          }
+        })
+        .catch(error => {
+          logger.error('Find One and Update replaceOne ERROR = ', error);
+          throw error;
+        });
+      } else {
+        logger.verbose('No Docs, nothing to update, return false');
+        return false;
+      }
+    } catch (error) {
+      logger.error('Find One and Update ERROR = ', error);
+      throw error;
+    }
+  }
+
+  async updateSchemaIndexes(query, update) {
+    // This method just updates Schema _metadata.indexes
+    // It is always a set (replace), never a merge
+    try {
+      logger.verbose('in Collection updateSchemaIndexes query = ' + JSON.stringify(query));
+      logger.verbose('update = ' + JSON.stringify(update));
+      const result = await this._rawFind(query, { type: 'one' }).then(result => {
+        return result;
+      });
+      if (Object.keys(result).length > 0) {
+        // found the doc, so we need to update it
+        const key = result.key;
+        logger.verbose('key = ' + key);
+        const version = result.version;
+        logger.verbose('version = ' + version);
+        const oldContent = result.content;
+        logger.verbose('oldContent = ' + JSON.stringify(oldContent));
+        logger.verbose('update = ' + JSON.stringify(update));
+        // Either set or merge _metadata depending on whether it existed before
+        Object.keys(update).forEach(item => {
+          if (item === '_metadata') {
+            if (Object.prototype.hasOwnProperty.call(oldContent, item)) {
+              if (Object.prototype.hasOwnProperty.call(oldContent[item], 'indexes')) {
+                if (
+                  Object.keys(update).length <= Object.keys(oldContent['_metadata']['indexes']).length
+                ) {
+                  // It's a delete. Parse deletes by sending an update with the deleted index
+                  // Set indexes from the update only
+                  _.set(oldContent[item], 'indexes', update[item]['indexes']);
+                } else {
+                  _.merge(oldContent['_metadata'], update[item]);
+                }
+              } else {
+                _.merge(oldContent['_metadata'], update[item]);
+              }
+            } else {
+              _.set(oldContent, item, update[item]);
+            }
+          }
+        });
+        const updateObj = oldContent;
+        logger.verbose('Updated Object = ' + JSON.stringify(updateObj));
+
+        let localConn = null;
+        return this.getCollectionConnection()
+          .then(conn => {
+            localConn = conn;
+            return this._oracleCollection.find().key(key).version(version).replaceOne(updateObj);
+          })
+          .then(result => {
+            if (result.replaced == true) {
+              return update;
+            } else {
+              return 'retry';
+            }
+          })
+          .finally(async () => {
+            if (localConn) {
+              await localConn.close();
+              localConn = null;
+            }
+          })
+          .catch(error => {
+            logger.error('updateSchemaIndexes update ERROR: ', error);
+            throw error;
+          });
+      } else {
+        logger.verbose('updateSchemaIndexes No record found for query: ' + JSON.stringify(query));
+        return false;
+      }
+    } catch (error) {
+      logger.error('updateSchemaIndexes ERROR: ', error);
+      throw error;
+    }
+  }
+
+  async findOneAndDelete(query: string) {
+    try {
+      logger.verbose('in Collection findOneAndDelete query = ' + JSON.stringify(query));
+
+      const result = await this._rawFind(query, { type: 'one' }).then(result => {
+        return result;
+      });
+
+      if (Object.keys(result).length > 0) {
+        // found the doc, so we need to delete it
+        const key = result.key;
+        logger.verbose('key = ' + key);
+        const version = result.version;
+        logger.verbose('version = ' + version);
+
+        let localConn = null;
+        return this.getCollectionConnection()
+          .then(conn => {
+            localConn = conn;
+            return this._oracleCollection.find().key(key).version(version).remove();
+          })
+          .finally(async () => {
+            if (localConn) {
+              await localConn.close();
+              localConn = null;
+            }
+          })
+          .catch(error => {
+            logger.error('Find One and Delete remove ERROR: ', error);
+            throw error;
+          });
+      } else {
+        logger.verbose('Find One and Delete No record found for query: ' + JSON.stringify(query));
+      }
+    } catch (error) {
+      logger.error('Find One and Delete ERROR: ', error);
+      throw error;
+    }
+  }
+
+  async deleteObjectsByQuery(query, transactionalSession) {
+    try {
+      logger.verbose('in Collection deleteObjectsByQuery query = ' + JSON.stringify(query));
+      logger.verbose(
+        'use transactionalSession to make linter happy ' + JSON.stringify(transactionalSession)
+      );
+
+      const result = await this._rawFind(query, { type: 'all' }).then(result => {
+        return result;
+      });
+
+      if (result.length > 0) {
+        const promises = [];
+        for (let i = 0; i < result.length; i++) {
+          // found a doc, so we need to delete it
+          const key = result[i].key;
+          logger.verbose('key = ' + key);
+          const version = result[i].version;
+          logger.verbose('version = ' + version);
+          let localConn = null;
+          const promise = this.getCollectionConnection()
+            .then(conn => {
+              localConn = conn;
+              return this._oracleCollection.find().key(key).version(version).remove();
+            })
+            .finally(async () => {
+              if (localConn) {
+                await localConn.close();
+                localConn = null;
+              }
+            })
+            .catch(error => {
+              logger.error('Delete Objects By Query remove ERROR: ', error);
+              throw error;
+            });
+          promises.push(promise);
+        }
+        // returning inside the loop would remove only the first match;
+        // wait for every matching document to be removed
+        return Promise.all(promises);
+      } else {
+        throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
+      }
+    } catch (error) {
+      logger.error('Delete Objects By Query ERROR: ', error);
+      throw error;
+    }
+  }
+
+  // Delete fields from all documents in a collection
+  async deleteFields(fieldNames: Array<string>) {
+    try {
+      var
promises = Array(); + // Rewriting like createIndexes, Collection method will just delete a field + logger.verbose( + 'DeleteFields ' + JSON.stringify(fieldNames) + ' for Collection ' + this._name + ); + for (let idx = 0; idx < fieldNames.length; idx++) { + const fieldName = fieldNames[idx]; + logger.verbose('about to delete field' + fieldName); + const promise = this.deleteFieldFromCollection(fieldName) + .then(promise => { + if (promise === 'retry') { + return this.deleteFieldFromCollection(fieldName); + } + return promise; + }) + .catch(error => { + logger.error('Collection deleteFields caught error ' + error.message); + throw error; + }); + promises.push(promise); + } + + const results = await Promise.all(promises); + logger.verbose('DeleteFields returns ' + results); + return results; + } catch (error) { + logger.error('Delete Fields ERROR: ', error); + throw error; + } + } + + // deleteField from all docs in a collection that has it + async deleteFieldFromCollection(fieldName: string) { + try { + logger.verbose('deleteFieldFromCollection fieldName to delete is ' + fieldName); + const query = JSON.parse(`{"${fieldName}":{"$exists":true}}`); + const result = await this._rawFind(query, { type: 'all' }).then(result => { + return result; + }); + + if (result.length > 0) { + // found the doc, so we need to update it + var promises = Array(); + for (let i = 0; i < result.length; i++) { + const promise = this.deleteField( + fieldName, + result[i].key, + result[i].version, + result[i].content + ) + .then(promise => { + if (promise === 'retry') { + return this.deleteFieldFromCollection(fieldName); + } + return promise; + }) + .catch(error => { + logger.error('deleteFieldFromConnection caught error ' + error.message); + throw error; + }); + promises.push(promise); + } + + const results = await Promise.all(promises); + logger.verbose('DeleteFieldFromCollection returns ' + results); + return results; + } else { + logger.verbose('Field ' + fieldName + ' Not Found In DeleteFieldFromCollection'); + return false; + } + } catch (error) { + logger.error('Delete Field ERROR: ', error); + throw error; + } + } + + // deleteField from a specific document containing it + async deleteField(fieldName: string, key: string, version: string, oldContent: string) { + logger.verbose('key = ' + key); + logger.verbose('version = ' + version); + logger.verbose('oldContent before delete = ' + JSON.stringify(oldContent)); + delete oldContent[fieldName]; + logger.verbose('oldContent after delete update = ' + JSON.stringify(oldContent)); + + let localConn = null; + return this.getCollectionConnection() + .then(conn => { + localConn = conn; + return this._oracleCollection.find().key(key).version(version).replaceOne(oldContent); + }) + .then(result => { + if (result.replaced == true) { + return oldContent; + } else { + return 'retry'; + } + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('DeleteFieldFromCollection replaceOne ERROR: ', error); + throw error; + }); + } + + // Delete a field in a specific SCHEMA doc + async deleteSchemaField(query: string, fieldName: string) { + try { + logger.verbose('fieldName to delete is ' + fieldName); + const existobj = JSON.parse(`{"${fieldName}":{"$exists":true}}`); + const newquery = { ...query, ...existobj }; + const result = await this._rawFind(newquery, { type: 'one' }).then(result => { + return result; + }); + + if (result) { + // found the doc, so we need to update it + const key = 
result.key; + logger.verbose('key = ' + key); + const version = result.version; + logger.verbose('version = ' + version); + const oldContent = result.content; + + logger.verbose('oldContent before delete = ' + JSON.stringify(oldContent)); + delete oldContent[fieldName]; + logger.verbose('oldContent after delete update = ' + JSON.stringify(oldContent)); + + let localConn = null; + return this.getCollectionConnection() + .then(conn => { + localConn = conn; + return this._oracleCollection.find().key(key).version(version).replaceOne(oldContent); + }) + .then(result => { + if (result.replaced == true) { + return oldContent; + } else { + return 'retry'; + } + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('Delete SCHEMA Field replaceOne ERROR: ', error.message); + throw error; + }); + } else { + logger.verbose('Field ' + fieldName + ' Not Found In DeleteSchemaField'); + return false; + } + } catch (error) { + logger.error('Delete SCHEMA Field ERROR: ', error); + throw error; + } + } + + // Does a find with "smart indexing". + // Currently this just means, if it needs a geoindex and there is + // none, then build the geoindex. + // This could be improved a lot but it's not clear if that's a good + // idea. Or even if this behavior is a good idea. + async find( + query, + { + skip, + limit, + sort, + keys, + maxTimeMS, + readPreference, + hint, + caseInsensitive, + explain, + sortTypes, + } = {} + ) { + try { + logger.verbose('entering find()'); + // Support for Full Text Search - $text + if (keys && keys.$score) { + delete keys.$score; + keys.score = { $meta: 'textScore' }; + } + + return this._rawFind( + query, + { type: 'content' }, + { + skip, + limit, + sort, + keys, + maxTimeMS, + readPreference, + hint, + caseInsensitive, + explain, + sortTypes, + } + ).then(result => { + return result; + }); + } catch (error) { + logger.verbose("in find()'s error block"); + // Check for "no geoindex" error + if (error.code != 17007 && !error.message.match(/unable to find index for .geoNear/)) { + throw error; + } + // Figure out what key needs an index + const key = error.message.match(/field=([A-Za-z_0-9]+) /)[1]; + if (!key) { + throw error; + } + // TODO: Need to fix up this call to DB + // TODO: MUST FIX + var index = {}; + index[key] = '2d'; + await this.getCollectionConnection(); + + const result = await this._oracleCollection + .createIndex(index) + // Retry, but just once. 
+ .then(() => + this._rawFind(query, { + skip, + limit, + sort, + keys, + maxTimeMS, + readPreference, + hint, + caseInsensitive, + explain, + }) + ); + this.closeConnection(); + return result.map(i => i.getContent()); + } + } + + async _rawFind( + query, + retval, + { + skip, + limit, + sort, + keys, + maxTimeMS, + readPreference, + hint, + caseInsensitive, + explain, + sortTypes, + } = {} + ) { + logger.verbose('_rawFind: collection = ' + JSON.stringify(this._oracleCollection)); + logger.verbose('query = ' + JSON.stringify(query)); + logger.verbose('limit = ' + limit); + // use these so the linter will not complain - until i actually use them properly + logger.verbose( + 'TODO: not using these: ' + sort, + maxTimeMS, + readPreference, + caseInsensitive, + explain + ); + + let localConn = null; + try { + let findOperation; + + await this.getCollectionConnection() + .then(conn => { + localConn = conn; + findOperation = this._oracleCollection.find(); + }) + .catch(async error => { + logger.error('Error getting connection in _rawFind, ERROR =' + error); + if (localConn) { + await localConn.close(); + localConn = null; + } + throw error; + }); + + // let findOperation = this._oracleCollection.find(); // find() is sync and returns SodaOperation + + // All this below is to handle empty array in $in selection + // Node APIs fail for empty array error + // The fix will be in a future release of instant client + // https://orahub.oci.oraclecorp.com/ora-microservices-dev/mbaas-parse-server/-/wikis/ORA-40676:-invalid-Query-By-Example-(QBE)-filter-specification-JZN-00305:-Array-of-values-was-empty + const myObj = JSON.parse(JSON.stringify(query)); + + for (const x in myObj) { + if (typeof myObj[x] === 'object') { + const json = JSON.parse(JSON.stringify(myObj[x])); + + //CDB + //to manage EqualTo() with null + // when an input query is like + // {"foo":null,"$or":[{"_rperm":{"$in":["*","*"]}},{"_rperm":null},{"_rperm":{"$exists":false}}]} + // and need to generate a $or for null check, need to wrap the whole thing with a $and + // It looks like null = non-existance or null + if (json == null) { + let newQuery = {}; + + if (Object.prototype.hasOwnProperty.call(myObj, '$or')) { + // This whole not handling null is getting ugly + const originalOr = JSON.stringify(myObj['$or']); + const queryOr = JSON.stringify({ $or: [{ [x]: { $exists: false } }, { [x]: null }] }); + const andString = `[${queryOr},{"$or":${originalOr}}]`; + newQuery['$and'] = JSON.parse(andString); + delete myObj['$or']; + } else { + newQuery = { $or: [{ [x]: { $exists: false } }, { [x]: null }] }; + } + query = newQuery; + } + //CDB-END + //CDB + //to manage notEqualTo() with null + if (json != null) { + if (Object.keys(json)[0] == '$ne') { + if (json['$ne'] == null) { + const newQuery = { $and: [{ [x]: { $exists: true } }, { [x]: { $ne: null } }] }; + query = newQuery; + } + } + } + //CDB-END + + //CDD + // Remove empty objects from $and clause + // ORA-40676: invalid Query-By-Example (QBE) filter specification + // JZN-00315: Empty objects not allowed + // + // fix up queries like + // { '$and': [ {}, { _p_user: '_User$EYTVvcG4j9' } ] } + if (json != null && x == '$and') { + if (Array.isArray(json)) { + const condList = new Array(); + json.forEach(item => { + if (!(Object.keys(item).length === 0)) { + condList.push(item); + } + }); + query = { + $and: condList, + }; + } + } + //CDD + + for (const y in json) { + //query should not match on array when searching for null + if (y === '$all' && Array.isArray(json[y]) && json[y][0] == 
null) { + if (localConn) { + await localConn.close(); + localConn = null; + } + return []; + } else { + // to manage $all of normal expression for query match on array with multiple objects + if ( + y === '$all' && + Array.isArray(json[y]) && + json[y][0]['__FIELD__!!__'] === undefined + ) { + const newCondList = Array(); + + for (var ass in myObj[x]['$all']) { + if (typeof myObj[x]['$all'][ass] === 'object') { + // ??? + const condList = myObj[x]['$all'][0]; + Object.keys(condList).forEach(function (key) { + // key: the name of the object key + // index: the ordinal position of the key within the object + const newField = x + '[*].' + key; + newCondList.push({ + [newField]: condList[key], + }); + }); + } + } + // For 'containsAll date array queries','containsAll string array queries','containsAll number array queries' + // no 'objects' in array: doesn't need a query re-write in $and:[] 'for query match on array with multiple objects' + // newCondList == [] + if (newCondList.length != 0) { + query = { + $and: newCondList, + }; + } + } //CDB + } + + if (y === '$in' || y === '$nin' || y === '$all') { + if (json[y].length > 0 && json[y][0] !== null) { + //TO MANAGE 'containsAllStartingWith single empty value returns empty results' test + if ( + Object.keys(json[y][0]).length == 0 && + y === '$all' && + typeof json[y][0] == 'object' + ) { + if (localConn) { + await localConn.close(); + localConn = null; + } + return []; + } + } + + if (json[y].length == 0) { + if (y === '$in' || y === '$all') { + if (localConn) { + await localConn.close(); + localConn = null; + } + return []; + } else { + query = JSON.parse('{}'); + } + } + } + // to manage $all of $regex expression + //To exclude a $all on $regex array to be transformed in $and + + /* CDD Commented this code out becuase it broke this query + {"numbers":{"$all":[1,2,3]} + and this test + containsAll number array queries + */ + + /* if (y === '$all' && json[y][0]['__FIELD__!!__'] === undefined) { + //find wrong field + for (ass in myObj[x]['$all']) { + if (typeof myObj[x]['$all'][ass] === 'object') { + if (Object.keys(ass)[0] != '$regex') { + //TO BE FIXED + if (localConn) { + localConn.close(); + localConn = null; + } + return []; + } + } + } //To manage 'containsAll number array queries' in conflict with 'containsAllStartingWith single empty value returns empty results' test + if (localConn) { + localConn.close(); + localConn = null; + } + return []; + }*/ + + if (y === '$all' && !(json[y][0]['__FIELD__!!__'] === undefined)) { + const condList = []; + + for (const condition in query[x][y]) { + condList.push({ + [x]: query[x][y][condition]['__FIELD__!!__'], + }); + } + + query = { + $and: condList, + }; + } //CDB-END + } + + // Let $or just passthrough + if (x === '$or') { + query[x] = myObj[x]; + } + } + } //CDB + + if (sort && Object.keys(sort).length != 0) { + //ADD ORDER IN QUERY + //FIX 15-11 + const orderByList = []; //let collection = new OracleSchemaCollection(this._oracleCollection); + for (const s in sort) { + const order = sort[s] == -1 ? 
'desc' : 'asc'; + const orderStatement = { + path: s, + datatype: sortTypes[s], + order: order, + }; //Fix 11-11 + + orderByList.push(orderStatement); + } //Fix 15-11 + + const oldQuery = query; + query = {}; + query['$query'] = oldQuery; + query['$orderby'] = orderByList; //Fix-End 11-11 + } // CDB-END + + findOperation = findOperation.filter(query); + + if (skip) { + findOperation = findOperation.skip(Number(skip)); + } + + if (limit) { + findOperation = findOperation.limit(Number(limit)); + } + + if (hint) { + findOperation = findOperation.hint(String(hint)); + } + // TODO need to handle sort and readPreference + // let findOperation = this._oracleCollection.find(query, { + // skip, + // limit, + // sort, + // readPreference, + // hint, + // }); + + if (keys) { + logger.verbose('keys.. with input = ' + JSON.stringify(keys)); + // param needs to be an Array + // check it is not an empty object... + if (!_.isEmpty(keys)) { + logger.verbose('keys was not empty'); + //CDB + //findOperation = findOperation.keys(keys); + //CDB-END + } + } + + // if (caseInsensitive) { + // findOperation = findOperation.collation(OracleCollection.caseInsensitiveCollation()); + // } + + // if (maxTimeMS) { + // findOperation = findOperation.maxTimeMS(maxTimeMS); + // } + + logger.verbose('findOperation = ' + JSON.stringify(findOperation)); + logger.verbose('about to getDocuments()'); + let localDocs; + return findOperation + .getDocuments() + .then(docs => { + if (retval.type === 'content') { + localDocs = docs.map(i => i.getContent()); + } + if (retval.type === 'sodadocs') { + localDocs = docs; + } + if (retval.type === 'one') { + // return docs, keys and version + if (docs && docs.length == 1) { + const one = new Object(); + one.content = docs[0].getContent(); + one.key = docs[0].key; + one.version = docs[0].version; + localDocs = one; + } else { + if (docs && docs.length == 0) { + return {}; + } else { + logger.error('rawFind ONE return type found multiple docs'); + throw 'rawFind ONE return type found multiple docs'; + } + } + } + if (retval.type === 'all') { + // return docs, keys and version + if (docs) { + const returndocs = new Array(); + for (var i = 0; i < docs.length; i++) { + const all = new Object(); + all.content = docs[i].getContent(); + all.key = docs[i].key; + all.version = docs[i].version; + returndocs.push(all); + } + localDocs = returndocs; + } + } + return localDocs; + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('Error running findOperation GetDocuments, ERROR =' + error); + throw error; + }); + } catch (error) { + if (localConn) { + await localConn.close(); + localConn = null; + } + logger.error('Error running _rawfind, ERROR =' + error); + throw error; + } + } + + //CDB 17-11 fix + + async distinct(field, query) { + //return this._oracleCollection.distinct(field, query); + const objects = await this._oracleCollection.find().filter(query).getDocuments(); + const arr = []; + for (const obj in objects) { + const content = _.get(objects[obj].getContent(), field); + Array.isArray(content) ? 
arr.push(...content) : arr.push(content); + } + //let distinctObjects = [...new Set(arr)]; + return [...new Set(arr)]; + } + //CDB-END + + async updateOne(query, update) { + logger.verbose('UpdateOne calling findOneandUpdate'); + return this.findOneAndUpdate(query, update, null); + } + + async insertOne(object) { + let localConn = null; + + const plsql = ` + BEGIN + EXECUTE IMMEDIATE (' + alter session set ddl_lock_timeout=300 + '); + END;`; + + return this.getCollectionConnection() + .then(conn => { + localConn = conn; + const result1 = localConn.execute(plsql); + const result = this._oracleCollection.insertOne(object); + return result; + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + if (error.errorNum !== 1) { + console.log('CDD error during insertOne = ' + error + " for Object " + JSON.stringify(object)); + console.trace(); + } + logger.error('error during insertOne = ' + error); + throw error; + }); + +/* try { +// console.log('CDD Collection insertOne before getConnection'); + localConn = await this.getCollectionConnection(); + const result1 = await localConn.execute(plsql); +// console.log('CDD Collection got connection calling insertOne for object ' + JSON.stringify(object)); + const result = await this._oracleCollection.insertOneAndGet(object); +// console.log('CDD Collection insertOne returned successfully'); + return object; + } catch (error) { + logger.error('error during insertOne = ' + error); +// console.log('CDD error during insertOne = ' + error); + throw error; + } finally { + if (localConn) { + await localConn.close(); + } + }*/ + } + + async drop() { + let localConn = null; + + logger.verbose('entered drop for ' + this._name); + return this.getCollectionConnection() + .then(conn => { + localConn = conn; + return this._oracleCollection.drop(); + }) + .then(result => { + if (result) { + logger.verbose('drop succeeded for ' + this._name); + } else { + logger.verbose('drop failed for ' + this._name); + } + return result; + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('in Drop Error' + error); + throw error; + }); + } + + async truncate() { + // collection.truncate() does not work with instant clients less than version 20 + // https://oracle.github.io/node-oracledb/doc/api.html#-11212-sodacollectiontruncate + // Error: DPI-1050: Oracle Client library is at version 19.8 but version 20.1 or higher is needed + // for now, do it the old fashioned way with collection.find.remove + let localConn = null; +// console.log('Truncate collection ' + this._name); + return this.getCollectionConnection() + .then(conn => { + localConn = conn; + return this._oracleCollection.truncate(); +// return this._oracleCollection.find().remove(); + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('in truncate Error' + error); + throw error; + }); + } + async _fetchAllSchemasFrom_SCHEMA() { + return this._rawFind({}, { type: 'content' }) + .then(schemas => { + logger.verbose('schemas = ' + schemas); + return schemas; + }) + .catch(error => { + logger.error('error during fetchAllSchemasFrom_SCHEMA = ' + error); + throw error; + }); + } + + getCollectionName() { + return this._name; + } + + _ensureSparseUniqueIndexInBackground(indexRequest) { + // TODO rewrite params to suit oracle soda + logger.verbose( + 'entered 
_ensureSparseUniqueIndexInBackground with indexRequest = ' + + JSON.stringify(indexRequest) + ); + return this._createIndex(indexRequest); + } + + async _createIndex(indexSpec) { + let localConn = null; + + const plsql = ` + BEGIN + EXECUTE IMMEDIATE (' + alter session set ddl_lock_timeout=1000 + '); + END;`; +// console.log('_createIndex index spec is ' + JSON.stringify(indexSpec)); + logger.verbose('_createIndex index spec is ' + JSON.stringify(indexSpec)); + return await this.getCollectionConnection() + .then(async conn => { + localConn = conn; + await conn.execute(plsql); + await this._oracleCollection.createIndex(indexSpec); + return Promise.resolve; + }) + .then(result => { + // Parse expects _id index in Schema to be + // _metadata: { indexes: { _id_: { _id: 1 }, name_1: { name: 1 } } } + const idx = { [indexSpec.fields[0].path]: 1 }; + if (indexSpec.fields[0].path === '_id') { + // indexSpec.fields[0].path = '_id_'; + indexSpec.name = '_id_'; + } + // const obj = { [indexSpec.fields[0].path]: [idx] }; + const obj = { [indexSpec.name]: [idx] }; + this.indexes.push(obj); + return result; + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + if (error.errorNum === 40733) { + /* + Rebuild internal indexes array on server restart from schema indexes + */ + const found = this.indexes.find(item => { + // Parse expects _id index in Schema to be + // _metadata: { indexes: { _id_: { _id: 1 }, name_1: { name: 1 } } } + if (indexSpec.fields[0].path === '_id') { + indexSpec.fields[0].path = '_id_'; + } + return Object.keys(item)[0] === indexSpec.fields[0].path; + }); + + if (typeof found === 'undefined') { + const idx = { [indexSpec.fields[0].path]: 1 }; + if (indexSpec.fields[0].path === '_id') { + indexSpec.name = '_id_'; + } + // const obj = { [indexSpec.fields[0].path]: [idx] }; + const obj = { [indexSpec.name]: [idx] }; + this.indexes.push(obj); + return Promise.resolve; + } + logger.verbose('Index' + JSON.stringify(indexSpec) + ' already exists'); + } else { + logger.error('createIndex throws ' + error); + console.log('createIndex throws ' + error); + throw error; + } + }); + } + + getIndexes(className) { + logger.verbose('OracleCollection getIndexes className = ' + className); + + // There is an odd case where _id is not added to schema document until server restart + // If _id_ does not exist in indexes array add it to returned array. 
+ // It does exist on the actual Collection + const found = this.indexes.find(item => { + return Object.keys(item)[0] === '_id_'; + }); + if (typeof found === 'undefined') { + this.indexes.push({ _id_: { _id: 1 } }); + } + logger.verbose('getIndexes returns ' + JSON.stringify(this.indexes)); + return this.indexes; + } + + async dropIndex(indexName) { + logger.verbose('Collection ' + this._name + ' is dropping index' + indexName); + let localConn = null; + + const result = await this.getCollectionConnection() + .then(async conn => { + localConn = conn; + const result = await this._oracleCollection.dropIndex(indexName); + return result; + }) + .finally(async () => { + if (localConn) { + await localConn.close(); + localConn = null; + } + }) + .catch(error => { + logger.error('error during dropIndex = ' + error); + throw error; + }); + + const found = this.indexes.find(item => { + return Object.keys(item)[0] === indexName; + }); + if (found) { + this.indexes.splice(this.indexes.indexOf(found), 1); + } + + return result; + } +} diff --git a/OracleStorageAdapter.js b/OracleStorageAdapter.js new file mode 100644 index 000000000..56726e6d7 --- /dev/null +++ b/OracleStorageAdapter.js @@ -0,0 +1,1321 @@ +// Copyright (c) 2023, Oracle and/or its affiliates. +// Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ +import defaults from '../../../defaults'; +import OracleSchemaCollection from './OracleSchemaCollection'; +import OracleCollection from './OracleCollection'; +import { StorageAdapter } from '../StorageAdapter'; +import type { SchemaType, StorageClass, QueryType, QueryOptions } from '../StorageAdapter'; +// @flow-disable-next +import Parse from 'parse/node'; +// @flow-disable-next +import _ from 'lodash'; +import logger from '../../../logger.js'; + +import { + transformKey, + transformWhere, + transformUpdate, + parseObjectToOracleObjectForCreate, + oracleObjectToParseObject, + transformPointerString, +} from './OracleTransform'; +import { Pool } from 'oracledb'; + +const oracledb = require('oracledb'); +const OracleSchemaCollectionName = '_SCHEMA'; + +const storageAdapterAllCollections = oracleAdapter => { + const collections = oracleAdapter.listAllCollections(oracleAdapter._collectionPrefix); + logger.verbose('collections is ' + JSON.stringify(collections)); + return collections; +}; + +//CDB +//to preserve the original query to hack for $containedBy +var queryBackup = ''; +//CDB-END + +var initialized = false; +var createConnPool = true; +var schemaCollection = null; + +const convertParseSchemaToOracleSchema = ({ ...schema }) => { + delete schema.fields._rperm; + delete schema.fields._wperm; + + if (schema.className === '_User') { + // Legacy mongo adapter knows about the difference between password and _hashed_password. + // Future database adapters will only know about _hashed_password. + // Note: Parse Server will bring back password with injectDefaultSchema, so we don't need + // to add _hashed_password back ever. + delete schema.fields._hashed_password; + } + + return schema; +}; + +// Returns { code, error } if invalid, or { result }, an object +// suitable for inserting into _SCHEMA collection, otherwise. 
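+// Hedged example of the shape produced below (assuming
+// parseFieldTypeToOracleFieldType maps 'String' to 'string', as the legacy
+// Mongo schema collection does): for className 'Player' with
+// fields { name: { type: 'String' } } and no CLP or indexes, this returns
+//   { _id: 'Player', objectId: 'string', updatedAt: 'string',
+//     createdAt: 'string', name: 'string' }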
+const oracleSchemaFromFieldsAndClassNameAndCLP = ( + fields, + className, + classLevelPermissions, + indexes +) => { + const oracleObject = { + _id: className, + // TODO: I'm not sure we need objectId + objectId: 'string', + updatedAt: 'string', + createdAt: 'string', + _metadata: undefined, + }; + + for (const fieldName in fields) { + const { type, targetClass, ...fieldOptions } = fields[fieldName]; + oracleObject[fieldName] = OracleSchemaCollection.parseFieldTypeToOracleFieldType({ + type, + targetClass, + }); + if (fieldOptions && Object.keys(fieldOptions).length > 0) { + oracleObject._metadata = oracleObject._metadata || {}; + oracleObject._metadata.fields_options = oracleObject._metadata.fields_options || {}; + oracleObject._metadata.fields_options[fieldName] = fieldOptions; + } + } + + if (typeof classLevelPermissions !== 'undefined') { + oracleObject._metadata = oracleObject._metadata || {}; + if (!classLevelPermissions) { + delete oracleObject._metadata.class_permissions; + } else { + oracleObject._metadata.class_permissions = classLevelPermissions; + } + } + + if (indexes && typeof indexes === 'object' && Object.keys(indexes).length > 0) { + oracleObject._metadata = oracleObject._metadata || {}; + oracleObject._metadata.indexes = indexes; + } + + if (!oracleObject._metadata) { + // cleanup the unused _metadata + delete oracleObject._metadata; + } + + return oracleObject; +}; + +function validateExplainValue(explain) { + if (explain) { + // The list of allowed explain values is from node-mongodb-native/lib/explain.js + const explainAllowedValues = [ + 'queryPlanner', + 'queryPlannerExtended', + 'executionStats', + 'allPlansExecution', + false, + true, + ]; + if (!explainAllowedValues.includes(explain)) { + throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Invalid value for explain'); + } + } +} + +export class OracleStorageAdapter implements StorageAdapter { + // private + _onchange: any; + _collectionPrefix: string; + _connectionPool: Pool; + _collections: Map; + + constructor(options: any) { + logger.verbose( + 'OracleStorageAdapter constructor, uri = ' + + options.databaseURI + + ' collectionPrefix = ' + + options.collectionPrefix + ); + this._uri = options.databaseURI; + this._collectionPrefix = options.collectionPrefix; + this._connectionPool = null; + this._collections = new Map(); + } + + _schemaCollection(): Promise { + try { + const collection = this._adaptiveCollection(OracleSchemaCollectionName); + if (schemaCollection === null) { + if (!this._stream && this.enableSchemaHooks) { + // TODO make sure these are all defined + this._stream = collection._orcaleCollection.watch(); + this._stream.on('change', () => this._onchange()); + } + schemaCollection = new OracleSchemaCollection(collection); + } + return schemaCollection; + } catch (error) { + this.handleError(error); + } + } + + listAllCollections(prefix) { + const result = new Array(); + this._collections.forEach(function (value, key) { + if (key.includes(prefix)) { + const array = key.split(prefix); + if (array.length == 2) { + result.push(array[1]); + } else { + result.push(array[0]); + } + } + }); + return result; + } + + async _truncate(collectionName) { + logger.verbose('Storage Adapter _truncate for ' + collectionName); + try { + const collection = this._adaptiveCollection(collectionName); + const result = await collection.truncate(); + logger.verbose('Storage Adapter _truncate for collection ' + collectionName + ' returns ' + JSON.stringify(result)); + return result; + } catch (error) { + logger.error('Storage 
Adapter _truncate Error for collection ' + collectionName + ' ERROR = ' + error);
+      this.handleError(error);
+    }
+  }
+
+  async _drop(collectionName) {
+    logger.verbose('StorageAdapter _drop ' + collectionName);
+    try {
+      const collection = this._adaptiveCollection(collectionName);
+      const result = await collection.drop();
+      if (result) {
+        // Remove Collection
+        logger.verbose('Dropping ' + this._collectionPrefix + collectionName + ' from collectionMap');
+        this._collections.delete(this._collectionPrefix + collectionName);
+        if (collectionName.includes(OracleSchemaCollectionName)) {
+          schemaCollection = null;
+        }
+      }
+      logger.verbose('StorageAdapter _drop returns ' + result);
+      return result;
+    } catch (error) {
+      logger.error('Storage Adapter _drop Error for ' + collectionName);
+      this.handleError(error);
+    }
+  }
+
+  _adaptiveCollection(name: string): OracleCollection {
+    let realName;
+
+    if (name.includes(this._collectionPrefix)) {
+      realName = name;
+    } else {
+      realName = this._collectionPrefix + name;
+    }
+    // first check if we already have this collection, and if so, just return it
+    // this will reuse the same collection and its embedded connection, so we don't
+    // create a connection starvation scenario
+    if (this._collections.get(realName)) {
+      logger.verbose('Adaptive Collection returning Existing collection ' + realName);
+      return this._collections.get(realName);
+    }
+
+    const collection = new OracleCollection(this, realName);
+    this._collections.set(realName, collection);
+    logger.verbose('Adaptive Collection returning Created collection ' + realName);
+    return collection;
+  }
+
+  async initialize() {
+    if (initialized === false) {
+      const wallet_location = process.env.ORACLE_WALLET_LOCATION;
+      const client_location = process.env.ORACLE_CLIENT_LOCATION;
+
+      if (typeof client_location === 'undefined') {
+        throw 'Required Environment Variable, ORACLE_CLIENT_LOCATION, is not defined';
+      }
+
+      logger.verbose('wallet location = ' + process.env.ORACLE_WALLET_LOCATION);
+      logger.verbose('oracle client = ' + process.env.ORACLE_CLIENT_LOCATION);
+
+      try {
+        if (typeof wallet_location === 'undefined') {
+          logger.info(
+            'No Wallet location specified. Initializing Oracle Client to access Local Database Docker Image'
+          );
+          oracledb.initOracleClient({
+            libDir: client_location,
+          });
+        } else {
+          logger.info(
+            'Wallet location specified. Initializing Oracle Client to access Cloud Database Instance'
+          );
+          oracledb.initOracleClient({
+            libDir: client_location,
+            configDir: wallet_location,
+          });
+        }
+      } catch (error) {
+        if (error.message.includes('NJS-077')) {
+          // already initialized - so ignore the error
+          logger.verbose('oracledb already initialized');
+        } else {
+          logger.error('Error Initializing Oracle Client: ' + error);
+          // if we get here, probably should exit the whole server process
+          process.exit(1);
+        }
+      }
+      initialized = true;
+    }
+  }
+
+  async connect() {
+    if (this.connectionPromise) {
+      logger.verbose('reusing connection pool ' + JSON.stringify(this._connectionPool));
+      logger.verbose(' statistics: ' + JSON.stringify(this._connectionPool.getStatistics()));
+      return this._connectionPool;
+    }
+
+    await this.initialize();
+
+    var re = new RegExp('oracledb://[a-zA-Z0-9_]*:[^@:]*@[a-zA-Z0-9_.:/]*$');
+
+    if (!re.test(this._uri)) {
+      throw 'Incorrect Connection String Format.
Format is oracledb://user:password@tnsname'; + } + + const user = this.getUserFromUri(this._uri); + const pw = this.getPasswordFromUri(this._uri); + const tnsname = this.getTnsNameFromUri(this._uri); + + logger.info('creating a connection pool'); + try { + if (createConnPool) { + createConnPool = false; + this.connectionPromise = await oracledb.createPool({ + poolAlias: 'parse', + user: user, + password: pw, + connectString: tnsname, + poolIncrement: 5, + poolMax: 100, + poolMin: 3, + poolTimeout: 10, + // Use default of 60000 ms + // queueTimeout: 10, + enableStatistics: true, + }); + logger.info('connection pool successfully created'); + this._connectionPool = oracledb.getPool('parse'); + return Promise.resolve(this._connectionPool); + } else { + logger.verbose('Returning connection promise while connecting'); + return this.connectionPromise; + } + } catch (error) { + logger.error('Error Creating Connection Pool: ', error); + throw error; + } + } + + getUserFromUri(uri) { + const myArray = uri.split('//'); + const myArray2 = myArray[1].split(':'); + return myArray2[0]; + } + + getPasswordFromUri(uri) { + const myArray = uri.split(':'); + const myArray2 = myArray[2].split('@'); + return myArray2[0]; + } + + getTnsNameFromUri(uri) { + const myArray = uri.split('@'); + return myArray[1]; + } + + handleError(error: ?(Error | Parse.Error)): Promise { + // if (error && error.code === 13) { + // // Unauthorized error + // delete this.client; + // delete this.database; + // delete this.connectionPromise; + // logger.error('Received unauthorized error', { error: error }); + // } + logger.error('in handleError with error =' + error); +// console.log('in handleError with error =' + error); +// console.trace(); + // What to throw? Maybe need to map ORA msgs to Parse msgs + // throw error.message; + throw error; + } + + classExists(className: string): Promise { + return new Promise(resolve => { + logger.verbose('classExists name = ' + className); + const collections = storageAdapterAllCollections(this); + resolve(collections.includes(className)); + }); + } + + async setClassLevelPermissions(className, CLPs) { + try { + logger.verbose('StorageAdapter setClassLevelPermissions for ' + className); + logger.verbose('setClassLevelPermissions permissions = ' + JSON.stringify(CLPs)); + const newCLPS = '{"_metadata": {"class_permissions":' + JSON.stringify(CLPs) + '}}'; + const newCLPSObj = JSON.parse(newCLPS); + const result = await this._schemaCollection().updateSchema(className, newCLPSObj); + logger.verbose('StorageAdapter setClassLevelPermissions returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter setClassLevelPermissions Error for ' + className); + this.handleError(error); + } + } + + async createClass(className: string, schema: SchemaType): Promise { + try { + logger.verbose('StorageAdapter createClass for ' + className); + schema = convertParseSchemaToOracleSchema(schema); + const oracleObject = oracleSchemaFromFieldsAndClassNameAndCLP( + schema.fields, + className, + schema.classLevelPermissions, + schema.indexes + ); + oracleObject._id = className; + const result = await this._schemaCollection().insertSchema(oracleObject); + logger.verbose('StorageAdapter createClass insertSchema result = ' + result); + if (typeof schema.indexes !== 'undefined' && Object.keys(schema.indexes).length > 0) { + if (Array.isArray(schema.indexes)) { + await this.createIndexes(className, schema.indexes); + } else { + const indexes = new Array(schema.indexes); + await 
this.createIndexes(className, indexes); + } + } + return result; + } catch (error) { + logger.error('StorageAdapter createClass Error for ' + className); + this.handleError(error); + } + } + + async addFieldIfNotExists(className: string, fieldName: string, type: any): Promise { + try { + logger.verbose('StorageAdapter addFieldIfNotExists for ' + className); + const result = await this._schemaCollection().addFieldIfNotExists(className, fieldName, type); + logger.verbose('StorageAdapter addFieldIfNotExists returns ' + result); + await this.createIndexesIfNeeded(className, fieldName, type); + return result; + } catch (error) { + logger.error('StorageAdapter addFieldIfNotExists Error for ' + className); + this.handleError(error); + } + } + + async updateFieldOptions(className: string, fieldName: string, type: any): Promise { + const schemaCollection = this._schemaCollection(); + await schemaCollection.updateFieldOptions(className, fieldName, type); + } + + async deleteClass(className: string): Promise { + try { + logger.verbose('StorageAdapter deleteClass for ' + className); + const result1 = await this._drop(className); + logger.verbose('StorageAdapter deleteClass drop returns ' + result1); + const result = await this._schemaCollection().findAndDeleteSchema(className); + logger.verbose('StorageAdapter deleteClass deleteSchema returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter deleteClass Error for ' + className); + this.handleError(error); + } + } + + async deleteAllClasses(fast: boolean) { + // let result; + logger.verbose('entering deleteAllClasses fast = ' + fast); + const collections = storageAdapterAllCollections(this); + return Promise.all( + collections.map(collection => (fast ? this._truncate(collection) : this._drop(collection))) + ); + } + + async deleteFields( + className: string, + schema: SchemaType, + fieldNames: Array + ): Promise { + logger.verbose('StorageAdapter deleteFields for className: ' + className); + logger.verbose('StorageAdapter deleteFields for schema: ' + schema); + logger.verbose('StorageAdapter deleteFields oracleFormatNames = ' + fieldNames); + try { + const collection = this._adaptiveCollection(className); + const result = await collection.deleteFields(fieldNames); + const result1 = await this._schemaCollection().deleteSchemaFields(className, fieldNames); + logger.verbose('StorageAdapter deleteFields collection result = ' + result); + logger.verbose('StorageAdapter deleteFields schemacollection result = ' + result1); + } catch (error) { + logger.error('StorageAdapter deleteFields Error for ' + className); + this.handleError(error); + } + } + + async getAllClasses(): Promise { + try { + const schemaCollection = this._schemaCollection(); + const result = await schemaCollection._fetchAllSchemasFrom_SCHEMA(); + logger.verbose('StorageAdapter getAllClasses returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter getAllClasses Error'); + this.handleError(error); + } + } + // getClass(className: string): Promise; + + // TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema, + // and should infer from the type. Or maybe does need the schema for validations. Or maybe needs + // the schema only for the legacy mongo format. We'll figure that out later. 
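+  // Rough contract, inferred from the body below rather than a published spec:
+  // resolves to { ops: [oracleObject] } on success, and surfaces ORA-00001
+  // (unique constraint violated) as Parse.Error.DUPLICATE_VALUE.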
+ async createObject( + className: string, + schema: SchemaType, + object: any, + transactionalSession: ?any + ) { + logger.verbose('StorageAdapter createObject for className: ' + className); + + try { + schema = convertParseSchemaToOracleSchema(schema); + const oracleObject = parseObjectToOracleObjectForCreate(className, object, schema); + const collection = this._adaptiveCollection(className); + const result = await collection.insertOne(oracleObject, transactionalSession); + logger.verbose('StorageAdapter createObject insertOne returns: ' + result); + return { ops: [oracleObject] }; + } catch (error) { + // "ORA-00001: unique constraint (ADMIN.index_name) violated" + if (error.errorNum === 1) { + // Duplicate value + const err = new Parse.Error( + Parse.Error.DUPLICATE_VALUE, + 'A duplicate value for a field with unique values was provided' + ); + err.underlyingError = error; + if (error.message) { + const matches = error.message.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/); + if (matches && Array.isArray(matches)) { + err.userInfo = { duplicated_field: matches[1] }; + } + } + this.handleError(err); + } + this.handleError(error); + } + } + + // Not yet implemented: + // updateObjectsByQuery( + // className: string, + // schema: SchemaType, + // query: QueryType, + // update: any, + // transactionalSession: ?any + // ): Promise<[any]>; + + async findOneAndUpdate(className, schema, query, update, transactionalSession) { + try { + logger.verbose('StorageAdapter findOneAndUpdate for ' + className); + let oraWhere = transformWhere(className, query, schema); + const oraUpdate = transformUpdate(className, update, schema); + // Check if this query needs Oracle Storage Adapter _wperm syntax + oraWhere = this.checkUserQuery(oraWhere); + const collection = this._adaptiveCollection(className); + const result = await collection.findOneAndUpdate(oraWhere, oraUpdate, transactionalSession); + logger.verbose('StorageAdapter findOneAndUpdate returns ' + JSON.stringify(result)); + return result; + } catch (error) { + logger.error('StorageAdapter findOneAndUpdate Error for ' + className); + this.handleError(error); + } + }
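+ /* + Editorial sketch of the transform pipeline used above (illustrative values; the exact + mapping lives in transformWhere/transformUpdate): a call like + findOneAndUpdate('GameScore', schema, { objectId: 'abc123' }, { score: { __op: 'Increment', amount: 1 } }, null) + is expected to have transformWhere rewrite the Parse key objectId to the stored _id key + (e.g. { _id: 'abc123' }), transformUpdate build the corresponding SODA-side update document, + and checkUserQuery (below) adjust any ACL clauses before the collection is touched. + */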
+ /* + Parse emits ACL clauses in queries that trigger an Oracle JSON-processing error; the + underlying database issue was fixed in + https://bug.oraclecorp.com/pls/bug/webbug_print.show?c_rptno=34596223 + + In short, Oracle SODA cannot handle a null inside an $in operator clause: + {_id: "TV5CazXRtP",_wperm: {$in: [null,"*","tE8wEhXmJg","role:Admins",],},} + + needs to be rewritten as + {_id: "tE8wEhXmJg",$or : [{_wperm: {"$in": [ "*", "tE8wEhXmJg" ]}}, {_wperm : null}]} + to work with Oracle SODA. + + Until a maintenance update delivers that fix, checkUserQuery rewrites the query. + + More detail: + https://orahub.oci.oraclecorp.com/ora-microservices-dev/mbaas-parse-server/-/wikis/Error:-ORA-40596:-error-occurred-in-JSON-processing-jznEngValCmpWithTypCnv:invTyp + */ + + checkUserQuery(query) { + logger.verbose('in StorageAdapter checkUserQuery'); + logger.verbose('Input query = ' + JSON.stringify(query)); + const newObj = {}; + const queryObj = JSON.parse(JSON.stringify(query)); + let checkNull = false; + + for (const x in queryObj) { + if (x === '_wperm' && typeof queryObj[x] === 'object') { + const myArray = []; + const json = JSON.parse(JSON.stringify(queryObj[x])); + + for (const y in json) { + if (y === '$in') { + if (json[y].length >= 2) { + for (let i = 0; i < json[y].length; i++) { + if (json[y][i] === null) { + checkNull = true; + } else { + myArray.push(json[y][i]); + } + } + } + } + if (json[y].length !== myArray.length) { + let temp; + if (checkNull) { + // Case where no perms exist on the document + temp = `[{"_wperm":{"$in":${JSON.stringify( + myArray + )}}},{"_wperm":null},{"_wperm":{"$exists":false}}]`; + } else { + temp = `[{"_wperm":{"$in":${JSON.stringify(myArray)}}},{"_wperm":null}]`; + } + delete queryObj['_wperm']; + newObj['$or'] = JSON.parse(temp); + } else { + newObj[x] = queryObj[x]; + } + } + } else if (x === '_rperm' && typeof queryObj[x] === 'object') { + const myArray = []; + const json = JSON.parse(JSON.stringify(queryObj[x])); + + for (const y in json) { + if (y === '$in') { + if (json[y].length >= 2) { + for (let i = 0; i < json[y].length; i++) { + if (json[y][i] === null) { + checkNull = true; + } else { + myArray.push(json[y][i]); + } + } + } + } + if (json[y].length !== myArray.length) { + let rpermOr; + if (checkNull) { + // Case where no perms exist on the document + rpermOr = `[{"_rperm":{"$in":${JSON.stringify( + myArray + )}}},{"_rperm":null},{"_rperm":{"$exists":false}}]`; + } else { + rpermOr = `[{"_rperm":{"$in":${JSON.stringify(myArray)}}},{"_rperm":null}]`; + } + if (Object.prototype.hasOwnProperty.call(newObj, '$or')) { + // $and the existing $or with the _rperm $or + const originalOr = JSON.stringify(newObj['$or']); + const andString = `[{"$or":${originalOr}},{"$or":${rpermOr}}]`; + // TODO: replacing the $and without checking if it existed + // look at lodash to merge + newObj['$and'] = JSON.parse(andString); + delete newObj['$or']; + } else { + newObj['$or'] = JSON.parse(rpermOr); + } + } else { + newObj[x] = queryObj[x]; + } + } + } else { + newObj[x] = queryObj[x]; + } + } + logger.verbose('Return query = ' + JSON.stringify(newObj)); + return newObj; + } + + async upsertOneObject(className, schema, query, update, transactionalSession) { + try { + logger.verbose('StorageAdapter upsertOneObject for Collection ' + className); + schema = convertParseSchemaToOracleSchema(schema); + const oraWhere = transformWhere(className, query, schema); + const oraUpdate = transformUpdate(className, update, schema); + const collection = this._adaptiveCollection(className); + const result = await collection.upsertOne(oraWhere, oraUpdate, transactionalSession); + logger.verbose('StorageAdapter upsertOneObject returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter upsertOneObject Error for ' + className); + this.handleError(error); + } + }
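+ /* + Editorial sketch: checkUserQuery (above) traced with illustrative values. Input: + + { _id: "tE8wEhXmJg", _wperm: { $in: [null, "*", "tE8wEhXmJg"] } } + + The null is stripped from $in and the clause becomes an $or; because a null was + present, the "no perms on the document" branches are added as well: + + { _id: "tE8wEhXmJg", + $or: [{ _wperm: { $in: ["*", "tE8wEhXmJg"] } }, + { _wperm: null }, + { _wperm: { $exists: false } }] } + */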
+ async deleteObjectsByQuery(className, schema, query, transactionalSession) { + try { + logger.verbose('StorageAdapter deleteObjectsByQuery for ' + className); + schema = convertParseSchemaToOracleSchema(schema); + let oraWhere = transformWhere(className, query, schema); + // Check if query needs Oracle Storage Adapter _wperm syntax + oraWhere = this.checkUserQuery(oraWhere); + const collection = this._adaptiveCollection(className); + const result = await collection.deleteObjectsByQuery(oraWhere, transactionalSession); + logger.verbose('StorageAdapter deleteObjectsByQuery returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter deleteObjectsByQuery Error for ' + className); + this.handleError(error); + } + } + + // Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }. + find( + className: string, + schema: SchemaType, + query: QueryType, + { skip, limit, sort, keys, readPreference, hint, caseInsensitive, explain }: QueryOptions + ): Promise { + logger.verbose('StorageAdapter find for ' + className); + validateExplainValue(explain); + schema = convertParseSchemaToOracleSchema(schema); + logger.verbose('query = ' + JSON.stringify(query)); + + //CDB + // Preserve the original query; the $containedBy post-filter below needs it. + // (Temporary workaround while the _rperm handling is finished.) + queryBackup = query; + //CDB-END + + let oracleWhere = transformWhere(className, query, schema); + // Check if this query needs Oracle Storage Adapter _wperm syntax + oracleWhere = this.checkUserQuery(oracleWhere); + logger.verbose('oracleWhere = ' + JSON.stringify(oracleWhere)); + // fix 15-11 + const oracleSort = _.mapKeys(sort, (value, fieldName) => + transformKey(className, fieldName, schema) + ); + + const sortTypes = {}; + for (const s in sort) { + let schemaFieldName; + let sortType = 'string'; + if (s.split('.').length > 1) { + schemaFieldName = s.split('.')[0]; + } else { + schemaFieldName = s; + } + const schemaTypeEntry = schema.fields[schemaFieldName]; + const schemaType = schemaTypeEntry[Object.keys(schemaTypeEntry)[0]]; + if (schemaType === 'Number') { + sortType = 'number'; + } + sortTypes[s] = sortType; + } + + logger.verbose('Make linter happy by using keys = ' + keys); + + const oracleKeys = keys; + + logger.verbose('oracleKeys = ' + JSON.stringify(oracleKeys)); + logger.verbose('make linter ignore ' + readPreference); + + const collection = this._adaptiveCollection(className); + return collection + .find(oracleWhere, { + skip, + limit, + sort: oracleSort, + keys: oracleKeys, + maxTimeMS: this._maxTimeMS, + readPreference: null, + hint, + caseInsensitive, + explain, + sortTypes, + }) + .then(objects => { + logger.verbose('after the find, objects = ' + JSON.stringify(objects)); + logger.verbose('about to map oracleObjectToParseObject'); + let result = objects.map(object => oracleObjectToParseObject(className, object, schema)); + logger.verbose('result = ' + JSON.stringify(result)); + + //CDB + // $containedBy issue: drop documents that do not satisfy $containedBy by diffing the two sets + if (JSON.stringify(queryBackup).indexOf('$containedBy') > -1) { + for (const prop in queryBackup) { + if (queryBackup[prop].$containedBy !== undefined) { + // 11-11 fix for 'containedBy number array' + const filteredResult = result.filter(myObject => { + const diff = myObject[prop].filter( + x => !queryBackup[prop].$containedBy.includes(x) + ); + return diff.length === 0; + }); + result = filteredResult; + // END 11-11 fix + } + } + } + //CDB-END
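+ /* + Editorial sketch of the $containedBy post-filter above, with illustrative data: + for a query { numbers: { $containedBy: [1, 2, 3] } }, a fetched document with + numbers [1, 2] yields an empty difference and is kept, while one with numbers + [1, 4] differs by [4] and is dropped from the result set. + */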
+ + //CDB + // Delete all fields not in oracleKeys + if (typeof oracleKeys !== 'undefined') { + for (const r in result) { + logger.verbose('oracleKeys to maintain = ' + JSON.stringify(oracleKeys)); + const myObject = result[r]; + + const oracleKeysSet = new Set(oracleKeys); + oracleKeysSet.add('createdAt'); + oracleKeysSet.add('updatedAt'); + oracleKeysSet.add('objectId'); + + const keysResult = new Set(Object.keys(myObject)); + logger.verbose('keys remaining = ' + JSON.stringify([...keysResult])); + + const diff = new Set([...keysResult].filter(element => !oracleKeysSet.has(element))); + logger.verbose('keys to delete = ' + JSON.stringify([...diff])); + + for (const toDel of diff) { + // Do NOT remove _rperm and _wperm; DatabaseController uses them to populate ParseObject.ACL + if (!(toDel === '_rperm' || toDel === '_wperm')) { + delete myObject[toDel]; + } + } + logger.verbose('properties remaining = ' + JSON.stringify(myObject)); + } + } + //CDB-END + logger.verbose('StorageAdapter find returns ' + result); + return result; + }) + .catch(err => this.handleError(err)); + } + + async setIndexesFromOracle(className: string) { + try { + logger.verbose('StorageAdapter setIndexesFromOracle for ' + className); + const indexes = await this.getIndexes(className); + const result = await this._schemaCollection().updateSchema(className, { + _metadata: { indexes: indexes }, + }); + logger.verbose('StorageAdapter setIndexesFromOracle returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter setIndexesFromOracle throws for className ' + className); + this.handleError(error); + } + } + + createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise { + logger.verbose('entered createTextIndexesIfNeeded query = ' + JSON.stringify(query)); + for (const fieldName in query) { + logger.verbose('processing field ' + fieldName); + if (!query[fieldName] || !query[fieldName].$text) { + continue; + } + const existingIndexes = schema.indexes; + logger.verbose('existingIndexes = ' + JSON.stringify(existingIndexes)); + for (const key in existingIndexes) { + const index = existingIndexes[key]; + if (Object.prototype.hasOwnProperty.call(index, fieldName)) { + return Promise.resolve(); + } + } + const indexName = `${fieldName}_text`; + const textIndex = { + [indexName]: { [fieldName]: 'text' }, + }; + return this.setIndexesWithSchemaFormat( + className, + textIndex, + existingIndexes, + schema.fields + ).catch(error => { + logger.error('got error ' + JSON.stringify(error)); + if (error.code === 85) { + // Index exists with different options + return this.setIndexesFromOracle(className); + } + throw error; + }); + } + return Promise.resolve(); + } + /* + TODO: + Are multiple indexes processed?
I think not because of + const fieldName = Object.keys(indexCreationRequest)[0]; + Also, can the code creating the indexspec in + ensureIndex and ensureUniqueness + be combined into 1 method + https://orahub.oci.oraclecorp.com/ora-microservices-dev/mbaas-parse-server/-/issues/35 + */ + async ensureIndex( + className: string, + schema: SchemaType, + fieldNames: string[], + indexName: ?string, + caseInsensitive: boolean = false, + options?: Object = {} + ): Promise { + try { + logger.verbose('StorageAdapter ensureIndex for ' + className); + schema = convertParseSchemaToOracleSchema(schema); + const indexCreationRequest = {}; + const oracleFieldNames = fieldNames.map(fieldName => + transformKey(className, fieldName, schema) + ); + oracleFieldNames.forEach(fieldName => { + indexCreationRequest[fieldName] = options.indexType !== undefined ? options.indexType : 1; + }); + + logger.verbose( + 'use these to make linter happy ' + + JSON.stringify(indexName) + + ' ' + + JSON.stringify(caseInsensitive) + ); + + const fieldName = Object.keys(indexCreationRequest)[0]; + // TODO: This code made maxLength = 1 which caused all kinds of breakage + // Determine the better way to do this but for now, default to 2000 which is what the JSON console did + // const maxLength = indexCreationRequest[Object.keys(indexCreationRequest)[0]]; +// const maxLength = 2000; + const indexRequest = { + name: fieldName, + fields: [ + { + path: fieldName, +// maxlength: maxLength, + }, + ], + unique: true, + }; + const collection = this._adaptiveCollection(className); + const result = await collection._createIndex(indexRequest); + logger.verbose('StorageAdapter ensureIndex returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter ensureIndex throws for className ' + className); + this.handleError(error); + } + } + + // Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't + // currently know which fields are nullable and which aren't, we ignore that criteria. + // As such, we shouldn't expose this function to users of parse until we have an out-of-band + // Way of determining if a field is nullable. Undefined doesn't count against uniqueness, + // which is why we use sparse indexes. 
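+ /* + Editorial sketch: the SODA index specification built below for a uniqueness + constraint, traced with an illustrative field name. A call such as + ensureUniqueness('_User', schema, ['username']) is expected to produce + + { name: 'username', fields: [{ path: 'username' }], unique: true } + + and hand it to the collection layer, whose _ensureSparseUniqueIndexInBackground + is assumed to apply the sparse semantics described above. + */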
+ async ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) { + try { + logger.verbose('StorageAdapter ensureUniqueness for ' + className); + schema = convertParseSchemaToOracleSchema(schema); + const indexCreationRequest = {}; + const oracleFieldNames = fieldNames.map(fieldName => + transformKey(className, fieldName, schema) + ); + oracleFieldNames.forEach(fieldName => { + indexCreationRequest[fieldName] = 1; + }); + const fieldName = Object.keys(indexCreationRequest)[0]; + // TODO: deriving maxLength from the request yielded 1, which caused all kinds of breakage. + // Determine a better way; for now maxlength is omitted (the JSON console defaulted to 2000). + // const maxLength = indexCreationRequest[Object.keys(indexCreationRequest)[0]]; + // const maxLength = 2000; + const indexRequest = { + name: fieldName, + fields: [ + { + path: fieldName, + // maxlength: maxLength, + }, + ], + unique: true, + }; + + const collection = this._adaptiveCollection(className); + const result = await collection._ensureSparseUniqueIndexInBackground(indexRequest); + logger.verbose('StorageAdapter ensureUniqueness returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter ensureUniqueness throws for className ' + className); + this.handleError(error); + } + } + + async count(className, schema, query, readPreference, hint) { + const skip = 0; + const limit = 0; + const sort = {}; + let keys; + const caseInsensitive = false; + const explain = false; + // See line 1183 in DatabaseController; it passes null in query + if (query === null) { + query = {}; + } + return this.find(className, schema, query, { + skip, + limit, + sort, + keys, + readPreference, + hint, + caseInsensitive, + explain, + }) + .then(collection => { + return collection.length; + }) + .catch(err => { + logger.error('in the catch block after collection.find for count()'); + this.handleError(err); + }); + } + + //CDB Fix 18-11 + async distinct(className, schema, query, fieldName) { + try { + logger.verbose('StorageAdapter distinct for ' + className); + schema = convertParseSchemaToOracleSchema(schema); + const isPointerField = + schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer'; + const transformField = transformKey(className, fieldName, schema); + const collection = this._adaptiveCollection(className); + let objects = await collection.distinct(transformField, transformWhere(className, query, schema)); + objects = objects.filter(obj => obj != null); + logger.verbose('StorageAdapter distinct returns ' + objects); + return objects.map(object => { + if (isPointerField) { + return transformPointerString(schema, fieldName, object); + } + + return oracleObjectToParseObject(className, object, schema); + }); + } catch (error) { + logger.error('StorageAdapter distinct throws for className ' + className); + this.handleError(error); + } + } + + //TO BE TESTED + /* + aggregate( + className: string, + schema: any, + pipeline: any, + readPreference: ?string, + hint: ?mixed, + explain?: boolean + ) { + validateExplainValue(explain); + let isPointerField = false; + pipeline = pipeline.map(stage => { + if (stage.$group) { + stage.$group = this._parseAggregateGroupArgs(schema, stage.$group); + if ( + stage.$group._id && + typeof stage.$group._id === 'string' && + stage.$group._id.indexOf('$_p_') >= 0 + ) { + isPointerField = true; + } + } + if (stage.$match) { + stage.$match = this._parseAggregateArgs(schema, stage.$match); + } + if (stage.$project) { + stage.$project =
this._parseAggregateProjectArgs(schema, stage.$project); + } + if (stage.$geoNear && stage.$geoNear.query) { + stage.$geoNear.query = this._parseAggregateArgs(schema, stage.$geoNear.query); + } + return stage; + }); + readPreference = this._parseReadPreference(readPreference); + return this._adaptiveCollection(className) + .then(collection => + collection.aggregate(pipeline, { + readPreference, + maxTimeMS: this._maxTimeMS, + hint, + explain, + }) + ) + .then(results => { + results.forEach(result => { + if (Object.prototype.hasOwnProperty.call(result, '_id')) { + if (isPointerField && result._id) { + result._id = result._id.split('$')[1]; + } + if ( + result._id == null || + result._id == undefined || + (['object', 'string'].includes(typeof result._id) && _.isEmpty(result._id)) + ) { + result._id = null; + } + result.objectId = result._id; + delete result._id; + } + }); + return results; + }) + .then(objects => objects.map(object => oracleObjectToParseObject(className, object, schema))) + .catch(err => this.handleError(err)); + } +*/ + //CDB-END + + performInitialization(): Promise { + return Promise.resolve(); + } + + watch(callback: () => void): void { + this._onchange = callback; + } + + createIndexPaths(index) { + var paths = Array(); + + Object.keys(index).forEach(key => { + paths.push({ + path: key, +// maxlength: 2000, + }); + }); + return paths; + } + + async createIndexes(className: string, indexes: any) { + try { + logger.verbose('StorageAdapter createIndexes for ' + className); + var promises = Array(); + const collection = this._adaptiveCollection(className); + + for (let idx = 0; idx < indexes.length; idx++) { + const index = indexes[idx]; + + let idxName = Object.keys(index)[0]; + let paths; + /* + 2 index formats can be passed in + { key: { aString: 1 }, name: 'name1' } + { name1: { aString: 1 } } + Handle them both + */ + if (idxName === 'key') { + paths = index[idxName]; + idxName = index['name']; + } else { + paths = index[idxName]; + } + + const indexRequest = { + name: idxName, + fields: this.createIndexPaths(paths), + unique: true, + }; + const promise = await collection._createIndex(indexRequest); + promises.push(promise); + } + const results = await Promise.all(promises); + logger.verbose('StorageAdapter createIndexes returns ' + results); + return results; + } catch (error) { + logger.error('StorageAdapter createIndexes throws for className ' + className); + this.handleError(error); + } + } + + async getIndexes(className: string, connection: ?any): Promise { + try { + logger.verbose( + 'StorageAdapter getIndexes for ' + className + ' Connection = ' + connection + ); + const collection = this._adaptiveCollection(className); + const result = collection.getIndexes(className); + logger.verbose('StorageAdapter getIndexes returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter getIndexes throws for className ' + className); + this.handleError(error); + } + } + + updateSchemaWithIndexes() { + return this.getAllClasses() + .then(classes => { + const promises = classes.map(schema => { + return this.setIndexesFromOracle(schema.className); + }); + return Promise.all(promises); + }) + .catch(err => this.handleError(err)); + } + + async setIndexesWithSchemaFormat( + className: string, + submittedIndexes: any, + existingIndexes: any = {}, + fields: any + ): Promise { + try { + logger.verbose('StorageAdapter setIndexesWithSchemaFormat for ' + className); + if (submittedIndexes === undefined) { + return Promise.resolve(); + } + if 
(Object.keys(existingIndexes).length === 0) { + existingIndexes = { _id_: { _id: 1 } }; + } + const deletePromises = []; + const insertedIndexes = []; + + for (let i = 0; i < Object.keys(submittedIndexes).length; i++) { + const name = Object.keys(submittedIndexes)[i]; + const field = submittedIndexes[name]; + if (existingIndexes[name] && field.__op !== 'Delete') { + throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`); + } + if (!existingIndexes[name] && field.__op === 'Delete') { + throw new Parse.Error( + Parse.Error.INVALID_QUERY, + `Index ${name} does not exist, cannot delete.` + ); + } + if (field.__op === 'Delete') { + const promise = await this.dropIndex(className, name); + deletePromises.push(promise); + delete existingIndexes[name]; + } else { + Object.keys(field).forEach(key => { + if ( + !Object.prototype.hasOwnProperty.call( + fields, + key.indexOf('_p_') === 0 ? key.replace('_p_', '') : key + ) + ) { + throw new Parse.Error( + Parse.Error.INVALID_QUERY, + `Field ${key} does not exist, cannot add index.` + ); + } + }); + existingIndexes[name] = field; + insertedIndexes.push({ + key: field, + name, + }); + } + } + if (insertedIndexes.length > 0) { + const insertPromise = await this.createIndexes(className, insertedIndexes); + logger.verbose( + 'StorageAdapter setIndexesWithSchemaFormat insertPromise = ' + insertPromise + ); + } + // Munge existing indexes into the expected format, based on the Schema.spec.js tests + const newExistingIndexes = + '{"_metadata": {"indexes":' + JSON.stringify(existingIndexes) + '}}'; + const newExistingIndexesObj = JSON.parse(newExistingIndexes); + await Promise.all(deletePromises); + const result = await this._schemaCollection().updateSchemaIndexes( + className, + newExistingIndexesObj + ); + logger.verbose('StorageAdapter setIndexesWithSchemaFormat returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter setIndexesWithSchemaFormat throws for className ' + className); + this.handleError(error); + } + } + + // createTransactionalSession(): Promise; + // commitTransactionalSession(transactionalSession: any): Promise; + // abortTransactionalSession(transactionalSession: any): Promise; + + async dropIndex(className: string, index: any) { + try { + logger.verbose('StorageAdapter dropIndex for ' + className); + const collection = this._adaptiveCollection(className); + const result = await collection.dropIndex(index); + logger.verbose('StorageAdapter dropIndex returns ' + result); + return result; + } catch (error) { + logger.error('StorageAdapter dropIndex throws for className ' + className); + this.handleError(error); + } + } + + createIndexesIfNeeded(className, fieldName, type) { + // The original method implementation from Mongo is commented out below. + // Not sure if we need the 2dsphere index for Geo; it may be Mongo specific. + logger.verbose( + 'createIndexesIfNeeded use className, fieldName and type to make linter happy ' + + className + + ' ' + + fieldName + + ' ' + + type + ); + /* if (type && type.type === 'Polygon') { + const index = { + [fieldName]: '2dsphere' + }; + return this.createIndex(className, index); + }*/ + return Promise.resolve(); + } +} + +export default OracleStorageAdapter; diff --git a/package.json b/package.json new file mode 100644 index 000000000..9ad1f110e --- /dev/null +++ b/package.json @@ -0,0 +1,171 @@ +{ + "name": "parse-server", + "version": "7.0.0-alpha.5", + "description": "An express module providing a Parse-compatible API server", + "main": "lib/index.js", + "repository": { + "type": 
"git", + "url": "https://github.com/parse-community/parse-server" + }, + "files": [ + "bin/", + "lib/", + "public_html/", + "views/", + "LICENSE", + "NOTICE", + "postinstall.js", + "README.md" + ], + "license": "Apache-2.0", + "dependencies": { + "@babel/eslint-parser": "7.21.8", + "@graphql-tools/merge": "8.4.1", + "@graphql-tools/schema": "9.0.4", + "@graphql-tools/utils": "8.12.0", + "@graphql-yoga/node": "2.6.0", + "@parse/fs-files-adapter": "1.2.2", + "@parse/push-adapter": "5.0.2", + "bcryptjs": "2.4.3", + "body-parser": "1.20.2", + "commander": "10.0.1", + "cors": "2.8.5", + "deepcopy": "2.1.0", + "express": "4.18.2", + "express-rate-limit": "6.7.0", + "follow-redirects": "1.15.4", + "graphql": "16.8.1", + "graphql-list-fields": "2.0.2", + "graphql-relay": "0.10.0", + "graphql-tag": "2.12.6", + "intersect": "1.0.1", + "jsonwebtoken": "9.0.0", + "jwks-rsa": "2.1.5", + "ldapjs": "2.3.3", + "lodash": "4.17.21", + "lru-cache": "9.1.1", + "mime": "3.0.0", + "mongodb": "5.9.0", + "mustache": "4.2.0", + "oracledb": "^6.2.0", + "otpauth": "9.1.2", + "parse": "4.1.0", + "path-to-regexp": "6.2.1", + "pg-monitor": "2.0.0", + "pg-promise": "11.5.0", + "pluralize": "8.0.0", + "rate-limit-redis": "3.0.2", + "redis": "4.6.6", + "semver": "7.5.2", + "subscriptions-transport-ws": "0.11.0", + "tv4": "1.3.0", + "uuid": "9.0.0", + "winston": "3.8.2", + "winston-daily-rotate-file": "4.7.1", + "ws": "8.13.0" + }, + "devDependencies": { + "@actions/core": "1.9.1", + "@apollo/client": "3.6.1", + "@babel/cli": "7.10.0", + "@babel/core": "7.20.2", + "@babel/plugin-proposal-object-rest-spread": "7.10.0", + "@babel/plugin-transform-flow-strip-types": "7.9.0", + "@babel/preset-env": "7.10.0", + "@saithodev/semantic-release-backmerge": "2.1.2", + "@semantic-release/changelog": "5.0.1", + "@semantic-release/commit-analyzer": "8.0.1", + "@semantic-release/git": "9.0.0", + "@semantic-release/github": "7.2.3", + "@semantic-release/npm": "7.1.3", + "@semantic-release/release-notes-generator": "9.0.3", + "all-node-versions": "11.3.0", + "apollo-upload-client": "17.0.0", + "bcrypt-nodejs": "0.0.3", + "clean-jsdoc-theme": "4.2.7", + "cross-env": "7.0.2", + "deep-diff": "1.0.2", + "eslint": "8.26.0", + "eslint-plugin-flowtype": "8.0.3", + "flow-bin": "0.119.1", + "form-data": "3.0.0", + "graphql-tag": "2.12.6", + "husky": "4.3.8", + "jasmine": "3.5.0", + "jasmine-spec-reporter": "7.0.0", + "jsdoc": "3.6.3", + "jsdoc-babel": "0.5.0", + "lint-staged": "10.2.3", + "madge": "5.0.1", + "mock-files-adapter": "file:spec/dependencies/mock-files-adapter", + "mock-mail-adapter": "file:spec/dependencies/mock-mail-adapter", + "mongodb-runner": "5.4.4", + "mongodb-version-list": "1.0.0", + "node-abort-controller": "3.0.1", + "node-fetch": "3.2.10", + "nyc": "15.1.0", + "prettier": "2.0.5", + "semantic-release": "17.4.6", + "yaml": "1.10.0" + }, + "scripts": { + "ci:check": "node ./ci/ciCheck.js", + "ci:checkNodeEngine": "node ./ci/nodeEngineCheck.js", + "ci:definitionsCheck": "node ./ci/definitionsCheck.js", + "definitions": "node ./resources/buildConfigDefinitions.js && prettier --write 'src/Options/*.js'", + "docs": "jsdoc -c ./jsdoc-conf.json", + "lint": "flow && eslint --cache ./", + "lint-fix": "eslint --fix --cache ./", + "build": "babel src/ -d lib/ --copy-files", + "watch": "babel --watch src/ -d lib/ --copy-files", + "test:mongodb:runnerstart": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} mongodb-runner start -t ${MONGODB_TOPOLOGY} --version 
${MONGODB_VERSION} -- --port 27017", + "test:mongodb:testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} TESTING=1 jasmine", + "test:mongodb": "npm run test:mongodb:runnerstart --dbversion=$npm_config_dbversion && npm run test:mongodb:testonly --dbversion=$npm_config_dbversion", + "test:mongodb:4.2.19": "npm run test:mongodb --dbversion=4.2.19", + "test:mongodb:4.4.13": "npm run test:mongodb --dbversion=4.4.13", + "test:mongodb:5.3.2": "npm run test:mongodb --dbversion=5.3.2", + "test:mongodb:6.0.2": "npm run test:mongodb --dbversion=6.0.2", + "test:mongodb:7.0.1": "npm run test:mongodb --dbversion=7.0.1", + "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.3.2} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} mongodb-runner start -t ${MONGODB_TOPOLOGY} --version ${MONGODB_VERSION} -- --port 27017", + "testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.3.2} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} TESTING=1 jasmine", + "testoracle": "cross-env PARSE_SERVER_TEST_DB=oracle PARSE_SERVER_TEST_TIMEOUT=60000 ORACLEDB_VERSION=${ORACLEDB_VERSION:=19.3.0.0} TESTING=1 jasmine", + "test": "npm run testonly", + "posttest": "cross-env mongodb-runner stop --all", + "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.3.2} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} TESTING=1 nyc jasmine", + "start": "node ./bin/parse-server", + "prettier": "prettier --write {src,spec}/{**/*,*}.js", + "prepare": "npm run build", + "postinstall": "node -p 'require(\"./postinstall.js\")()'", + "madge:circular": "node_modules/.bin/madge ./src --circular" + }, + "engines": { + "node": ">=14.21.0 <17 || >=18 <19" + }, + "bin": { + "parse-server": "bin/parse-server" + }, + "optionalDependencies": { + "@node-rs/bcrypt": "1.1.0" + }, + "collective": { + "type": "opencollective", + "url": "https://opencollective.com/parse-server", + "logo": "https://opencollective.com/parse-server/logo.txt?reverse=true&variant=binary" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parse-server" + }, + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "{src,spec}/{**/*,*}.js": [ + "prettier --write", + "eslint --fix --cache", + "git add" + ] + } +}