diff --git a/app/org/thp/cortex/Module.scala b/app/org/thp/cortex/Module.scala
index dcf54bf31..5bd7fb474 100644
--- a/app/org/thp/cortex/Module.scala
+++ b/app/org/thp/cortex/Module.scala
@@ -17,7 +17,7 @@ import play.api.libs.concurrent.AkkaGuiceSupport
import play.api.{Configuration, Environment, Logger, Mode}
import java.lang.reflect.Modifier
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
class Module(environment: Environment, configuration: Configuration) extends AbstractModule with ScalaModule with AkkaGuiceSupport {
diff --git a/app/org/thp/cortex/controllers/AssetCtrl.scala b/app/org/thp/cortex/controllers/AssetCtrl.scala
index 065aac1c4..149eef1c4 100644
--- a/app/org/thp/cortex/controllers/AssetCtrl.scala
+++ b/app/org/thp/cortex/controllers/AssetCtrl.scala
@@ -13,7 +13,7 @@ trait AssetCtrl {
}
@Singleton
-class AssetCtrlProd @Inject() (errorHandler: HttpErrorHandler, meta: AssetsMetadata) extends Assets(errorHandler, meta) with AssetCtrl {
+class AssetCtrlProd @Inject() (errorHandler: HttpErrorHandler, meta: AssetsMetadata, env: Environment) extends Assets(errorHandler, meta, env) with AssetCtrl {
def get(file: String): Action[AnyContent] = at("/www", file)
}
diff --git a/app/org/thp/cortex/controllers/StatusCtrl.scala b/app/org/thp/cortex/controllers/StatusCtrl.scala
index 83b25b4b9..a60a47f3a 100644
--- a/app/org/thp/cortex/controllers/StatusCtrl.scala
+++ b/app/org/thp/cortex/controllers/StatusCtrl.scala
@@ -1,20 +1,20 @@
package org.thp.cortex.controllers
-import scala.concurrent.{ExecutionContext, Future}
+import com.sksamuel.elastic4s.ElasticDsl
+import org.elastic4play.controllers.Authenticated
+import org.elastic4play.services.AuthSrv
+import org.elastic4play.services.auth.MultiAuthSrv
+import org.elasticsearch.client.Node
+import org.thp.cortex.models.{Roles, Worker, WorkerType}
+import org.thp.cortex.services.WorkerSrv
import play.api.Configuration
import play.api.http.Status
import play.api.libs.json.Json.toJsFieldJsValueWrapper
-import play.api.libs.json.{JsBoolean, JsNull, JsString, Json}
+import play.api.libs.json.{JsBoolean, JsString, Json}
import play.api.mvc.{AbstractController, Action, AnyContent, ControllerComponents}
-import com.sksamuel.elastic4s.ElasticDsl
-import org.elastic4play.controllers.Authenticated
import javax.inject.{Inject, Singleton}
-import org.elasticsearch.client.Node
-import org.thp.cortex.models.{Roles, Worker, WorkerType}
-import org.elastic4play.services.AuthSrv
-import org.elastic4play.services.auth.MultiAuthSrv
-import org.thp.cortex.services.WorkerSrv
+import scala.concurrent.ExecutionContext
@Singleton
class StatusCtrl @Inject() (
diff --git a/app/org/thp/cortex/models/BaseConfig.scala b/app/org/thp/cortex/models/BaseConfig.scala
index 1f5164955..578aef7ec 100644
--- a/app/org/thp/cortex/models/BaseConfig.scala
+++ b/app/org/thp/cortex/models/BaseConfig.scala
@@ -5,10 +5,8 @@ import scala.concurrent.duration.Duration
import play.api.Configuration
import play.api.libs.json._
-import org.elastic4play.utils.Collection.distinctBy
-
case class BaseConfig(name: String, workerNames: Seq[String], items: Seq[ConfigurationDefinitionItem], config: Option[WorkerConfig]) {
- def +(other: BaseConfig) = BaseConfig(name, workerNames ++ other.workerNames, distinctBy(items ++ other.items)(_.name), config.orElse(other.config))
+ def +(other: BaseConfig): BaseConfig = BaseConfig(name, workerNames ++ other.workerNames, (items ++ other.items).distinctBy(_.name), config.orElse(other.config))
}
object BaseConfig {
diff --git a/app/org/thp/cortex/models/Job.scala b/app/org/thp/cortex/models/Job.scala
index eaaea43b4..90e8f1672 100644
--- a/app/org/thp/cortex/models/Job.scala
+++ b/app/org/thp/cortex/models/Job.scala
@@ -1,19 +1,17 @@
package org.thp.cortex.models
import scala.util.Try
-
-import play.api.libs.json.{JsObject, JsString, Json}
+import play.api.libs.json.{Format, JsObject, JsString, Json}
import javax.inject.{Inject, Singleton}
import org.thp.cortex.models.JsonFormat.workerTypeFormat
-
import org.elastic4play.models.JsonFormat.enumFormat
import org.elastic4play.models.{AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O}
object JobStatus extends Enumeration with HiveEnumeration {
type Type = Value
val Waiting, InProgress, Success, Failure, Deleted = Value
- implicit val reads = enumFormat(this)
+ implicit val reads: Format[Value] = enumFormat(this)
}
trait JobAttributes {
diff --git a/app/org/thp/cortex/models/Organization.scala b/app/org/thp/cortex/models/Organization.scala
index 32934491f..6c2d4c139 100644
--- a/app/org/thp/cortex/models/Organization.scala
+++ b/app/org/thp/cortex/models/Organization.scala
@@ -1,12 +1,9 @@
package org.thp.cortex.models
import javax.inject.{Inject, Provider, Singleton}
-
import scala.concurrent.{ExecutionContext, Future}
-
import play.api.Logger
-import play.api.libs.json.{JsNumber, JsObject, JsString, Json}
-
+import play.api.libs.json.{Format, JsNumber, JsObject, JsString, Json}
import org.elastic4play.models.JsonFormat.enumFormat
import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O}
import org.elastic4play.services.FindSrv
@@ -14,7 +11,7 @@ import org.elastic4play.services.FindSrv
object OrganizationStatus extends Enumeration with HiveEnumeration {
type Type = Value
val Active, Locked = Value
- implicit val reads = enumFormat(this)
+ implicit val reads: Format[Value] = enumFormat(this)
}
trait OrganizationAttributes { _: AttributeDef =>
diff --git a/app/org/thp/cortex/models/User.scala b/app/org/thp/cortex/models/User.scala
index 28a9bc592..1c298b2c9 100644
--- a/app/org/thp/cortex/models/User.scala
+++ b/app/org/thp/cortex/models/User.scala
@@ -1,9 +1,7 @@
package org.thp.cortex.models
import scala.concurrent.Future
-
-import play.api.libs.json.{JsArray, JsBoolean, JsObject, JsString}
-
+import play.api.libs.json.{Format, JsArray, JsBoolean, JsObject, JsString}
import org.elastic4play.models.JsonFormat.enumFormat
import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O}
import org.elastic4play.services.{User => EUser}
@@ -11,7 +9,7 @@ import org.elastic4play.services.{User => EUser}
object UserStatus extends Enumeration with HiveEnumeration {
type Type = Value
val Ok, Locked = Value
- implicit val reads = enumFormat(this)
+ implicit val reads: Format[Value] = enumFormat(this)
}
trait UserAttributes { _: AttributeDef =>
diff --git a/app/org/thp/cortex/models/Worker.scala b/app/org/thp/cortex/models/Worker.scala
index 381f086ba..cfe336549 100644
--- a/app/org/thp/cortex/models/Worker.scala
+++ b/app/org/thp/cortex/models/Worker.scala
@@ -5,7 +5,7 @@ import org.elastic4play.models.JsonFormat.enumFormat
import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F, AttributeOption => O}
import org.elastic4play.utils.Hasher
import org.thp.cortex.models.JsonFormat.workerTypeFormat
-import play.api.libs.json.{JsObject, JsString, Json}
+import play.api.libs.json.{Format, JsObject, JsString, Json}
import scala.concurrent.Future
import scala.util.Try
@@ -17,7 +17,7 @@ object RateUnit extends Enumeration with HiveEnumeration {
val Hour = Value(60 * 60)
val Day = Value(60 * 60 * 24)
val Month = Value(60 * 60 * 24 * 30)
- implicit val reads = enumFormat(this)
+ implicit val reads: Format[Value] = enumFormat(this)
}
object WorkerType extends Enumeration with HiveEnumeration {
diff --git a/app/org/thp/cortex/services/AuditSrv.scala b/app/org/thp/cortex/services/AuditActor.scala
similarity index 79%
rename from app/org/thp/cortex/services/AuditSrv.scala
rename to app/org/thp/cortex/services/AuditActor.scala
index 8a73e7aff..3d4b0ff2b 100644
--- a/app/org/thp/cortex/services/AuditSrv.scala
+++ b/app/org/thp/cortex/services/AuditActor.scala
@@ -1,16 +1,12 @@
package org.thp.cortex.services
import javax.inject.{Inject, Singleton}
-
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
-
import play.api.Logger
-
import akka.actor.{Actor, ActorRef}
import org.thp.cortex.models.JobStatus
-
-import org.elastic4play.models.BaseEntity
+import org.elastic4play.models.{BaseEntity, BaseModelDef}
import org.elastic4play.services._
object AuditActor {
@@ -24,10 +20,10 @@ class AuditActor @Inject() (eventSrv: EventSrv, implicit val ec: ExecutionContex
import AuditActor._
- object EntityExtractor {
- def unapply(e: BaseEntity) = Some((e.model, e.id, e.routing))
+ private object EntityExtractor {
+ def unapply(e: BaseEntity): Option[(BaseModelDef, String, String)] = Some((e.model, e.id, e.routing))
}
- var registration = Map.empty[String, Seq[ActorRef]]
+ private var registration = Map.empty[String, Seq[ActorRef]]
private[AuditActor] lazy val logger = Logger(getClass)
override def preStart(): Unit = {
@@ -42,17 +38,17 @@ class AuditActor @Inject() (eventSrv: EventSrv, implicit val ec: ExecutionContex
override def receive: Receive = {
case Register(jobId, timeout) =>
- logger.info(s"Register new listener for job $jobId ($sender)")
- val newActorList = registration.getOrElse(jobId, Nil) :+ sender
+ logger.info(s"Register new listener for job $jobId (${sender()})")
+ val newActorList = registration.getOrElse(jobId, Nil) :+ sender()
registration += (jobId -> newActorList)
- context.system.scheduler.scheduleOnce(timeout, self, Unregister(jobId, sender))
+ context.system.scheduler.scheduleOnce(timeout, self, Unregister(jobId, sender()))
case Unregister(jobId, actorRef) =>
logger.info(s"Unregister listener for job $jobId ($actorRef)")
val newActorList = registration.getOrElse(jobId, Nil).filterNot(_ == actorRef)
registration += (jobId -> newActorList)
- case AuditOperation(EntityExtractor(model, id, routing), action, details, authContext, date) =>
+ case AuditOperation(EntityExtractor(model, id, _), action, details, _, _) =>
if (model.modelName == "job" && action == AuditableAction.Update) {
logger.info(s"Job $id has be updated (${details \ "status"})")
val status = (details \ "status").asOpt[JobStatus.Type].getOrElse(JobStatus.InProgress)
diff --git a/app/org/thp/cortex/services/JobSrv.scala b/app/org/thp/cortex/services/JobSrv.scala
index f7fd9323f..1959f970e 100644
--- a/app/org/thp/cortex/services/JobSrv.scala
+++ b/app/org/thp/cortex/services/JobSrv.scala
@@ -19,7 +19,7 @@ import org.thp.cortex.models._
import org.elastic4play._
import org.elastic4play.controllers._
-import org.elastic4play.services._
+import org.elastic4play.services.{UserSrv => _, _}
import org.elastic4play.utils.Hasher
@Singleton
diff --git a/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala b/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala
index d5edcb275..62a26b61c 100644
--- a/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala
+++ b/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala
@@ -8,7 +8,6 @@ import play.api.libs.json.Json
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}
import javax.inject.{Inject, Singleton}
-import scala.collection.mutable
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.FiniteDuration
import scala.sys.process.{Process, ProcessLogger, _}
@@ -24,7 +23,7 @@ class ProcessJobRunnerSrv @Inject() (implicit val system: ActorSystem) {
def checkCortexUtilsVersion(pythonVersion: String): Option[(Int, Int, Int)] =
Try {
(s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil)
- .lineStream
+ .lazyLines
.collectFirst {
case pythonPackageVersionRegex(major, minor, patch) => (major.toInt, minor.toInt, patch.toInt)
}
@@ -34,7 +33,7 @@ class ProcessJobRunnerSrv @Inject() (implicit val system: ActorSystem) {
ec: ExecutionContext
): Try[Unit] = {
val baseDirectory = Paths.get(command).getParent.getParent
- val output = mutable.StringBuilder.newBuilder
+ val output = new StringBuilder()
logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}")
val cacertsFile = jobDirectory.resolve("input").resolve("cacerts")
val env = if (Files.exists(cacertsFile)) Seq("REQUESTS_CA_BUNDLE" -> cacertsFile.toString) else Nil
diff --git a/app/org/thp/cortex/services/StreamSrv.scala b/app/org/thp/cortex/services/StreamSrv.scala
index 3695199af..9ac57bfc6 100644
--- a/app/org/thp/cortex/services/StreamSrv.scala
+++ b/app/org/thp/cortex/services/StreamSrv.scala
@@ -180,7 +180,7 @@ class StreamActor(
wr.submit(Nil)
logger.error("Multiple requests !")
}
- context.become(receiveWithState(Some(new WaitingRequest(sender)), currentMessages))
+ context.become(receiveWithState(Some(new WaitingRequest(sender())), currentMessages))
case Submit =>
waitingRequest match {
diff --git a/app/org/thp/cortex/services/WorkerConfigSrv.scala b/app/org/thp/cortex/services/WorkerConfigSrv.scala
index 7cbdfe087..965678558 100644
--- a/app/org/thp/cortex/services/WorkerConfigSrv.scala
+++ b/app/org/thp/cortex/services/WorkerConfigSrv.scala
@@ -14,7 +14,7 @@ import org.scalactic.Accumulation._
import org.elastic4play.{AttributeCheckingError, NotFoundError}
import org.elastic4play.controllers.Fields
import org.elastic4play.database.ModifyConfig
-import org.elastic4play.services._
+import org.elastic4play.services.{UserSrv => _, _}
trait WorkerConfigSrv {
val configuration: Configuration
diff --git a/app/org/thp/cortex/services/WorkerSrv.scala b/app/org/thp/cortex/services/WorkerSrv.scala
index 9de59b0bd..17d19e8d1 100644
--- a/app/org/thp/cortex/services/WorkerSrv.scala
+++ b/app/org/thp/cortex/services/WorkerSrv.scala
@@ -6,8 +6,7 @@ import akka.stream.scaladsl.{Sink, Source}
import org.elastic4play._
import org.elastic4play.controllers.{Fields, StringInputValue}
import org.elastic4play.database.ModifyConfig
-import org.elastic4play.services.QueryDSL.any
-import org.elastic4play.services._
+import org.elastic4play.services.{UserSrv => _, _}
import org.scalactic.Accumulation._
import org.scalactic._
import org.thp.cortex.models._
@@ -17,10 +16,10 @@ import play.api.{Configuration, Logger}
import java.net.URL
import java.nio.file.{Files, Path, Paths}
import javax.inject.{Inject, Provider, Singleton}
-import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import scala.io.Codec
import scala.util.{Failure, Success, Try}
+import scala.jdk.CollectionConverters._
@Singleton
class WorkerSrv @Inject() (
@@ -212,7 +211,9 @@ class WorkerSrv @Inject() (
}
.map { worker =>
val wmap = worker.flatten.map(w => w.id -> w).toMap
- workerMapLock.synchronized(workerMap = wmap)
+ workerMapLock.synchronized {
+ workerMap = wmap
+ }
logger.info(s"New worker list:\n\n\t${workerMap.values.map(a => s"${a.name} ${a.version}").mkString("\n\t")}\n")
}
@@ -229,7 +230,7 @@ class WorkerSrv @Inject() (
.validatedBy(_.read(rawConfig))
.map(JsObject.apply)
- val unknownConfigItems = (rawConfig.value.keySet -- configItems.map(_.name))
+ val unknownConfigItems = (rawConfig.value.keySet.toSet -- configItems.map(_.name))
.foldLeft[Unit Or Every[AttributeError]](Good(())) {
case (Good(_), ci) => Bad(One(UnknownAttributeError("worker.config", JsString(ci))))
case (Bad(e), ci) => Bad(UnknownAttributeError("worker.config", JsString(ci)) +: e)
diff --git a/app/org/thp/cortex/services/mappers/GroupUserMapper.scala b/app/org/thp/cortex/services/mappers/GroupUserMapper.scala
index c276a6001..2e85ccc5b 100644
--- a/app/org/thp/cortex/services/mappers/GroupUserMapper.scala
+++ b/app/org/thp/cortex/services/mappers/GroupUserMapper.scala
@@ -66,7 +66,7 @@ class GroupUserMapper(
case Some(groupsEndpointUrl) =>
logger.debug(s"Retreiving groups from $groupsEndpointUrl")
val apiCall = authHeader.fold(ws.url(groupsEndpointUrl))(headers => ws.url(groupsEndpointUrl).addHttpHeaders(headers))
- apiCall.get.flatMap { r =>
+ apiCall.get().flatMap { r =>
extractGroupsThenBuildUserFields(jsValue, r.json)
}
case None =>
diff --git a/app/org/thp/cortex/util/JsonConfig.scala b/app/org/thp/cortex/util/JsonConfig.scala
index 382b682fd..9099d3e4e 100644
--- a/app/org/thp/cortex/util/JsonConfig.scala
+++ b/app/org/thp/cortex/util/JsonConfig.scala
@@ -5,7 +5,7 @@ import com.typesafe.config.{ConfigList, ConfigObject, ConfigValue}
import play.api.Configuration
import play.api.libs.json._
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
object JsonConfig {
implicit val configValueWrites: Writes[ConfigValue] = Writes((value: ConfigValue) =>
diff --git a/build.sbt b/build.sbt
index e9f7c3211..935f7c7a9 100644
--- a/build.sbt
+++ b/build.sbt
@@ -5,12 +5,13 @@ ThisBuild / evictionErrorLevel := util.Level.Warn
ThisBuild / dependencyOverrides ++= Seq(
Dependencies.Play.twirl,
- "com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5",
+ "com.fasterxml.jackson.core" % "jackson-databind" % "2.14.3",
"org.apache.commons" % "commons-compress" % "1.23.0",
"com.google.guava" % "guava" % "32.1.1-jre"
)
lazy val cortex = (project in file("."))
.enablePlugins(PlayScala)
+ .dependsOn(elastic4play)
.settings(projectSettings)
.settings(PackageSettings.packageSettings)
.settings(PackageSettings.rpmSettings)
@@ -25,10 +26,8 @@ lazy val cortex = (project in file("."))
Dependencies.Play.specs2 % Test,
Dependencies.Play.guice,
Dependencies.scalaGuice,
- Dependencies.elastic4play,
Dependencies.reflections,
Dependencies.zip4j,
- Dependencies.dockerClient,
Dependencies.dockerJavaClient,
Dependencies.dockerJavaTransport,
Dependencies.akkaCluster,
@@ -40,9 +39,6 @@ lazy val cortex = (project in file("."))
"com.github.jnr" % "jnr-enxio" % "0.32.14",
"com.github.jnr" % "jnr-unixsocket" % "0.38.19"
),
- resolvers += Resolver.sbtPluginRepo("releases"),
- resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
- resolvers += "elasticsearch-releases" at "https://artifacts.elastic.co/maven",
Compile / packageDoc / publishArtifact := false,
Compile / doc / sources := Seq.empty,
// Front-end //
@@ -55,6 +51,24 @@ lazy val cortex = (project in file("."))
)
)
+val elastic4sVersion = "7.17.4"
+
+lazy val elastic4play = (project in file("elastic4play"))
+ .enablePlugins(PlayScala)
+ .settings(
+    libraryDependencies ++= Seq(
+      cacheApi,
+      "com.sksamuel.elastic4s" %% "elastic4s-core"          % elastic4sVersion,
+      "com.sksamuel.elastic4s" %% "elastic4s-http-streams"  % elastic4sVersion,
+      "com.sksamuel.elastic4s" %% "elastic4s-client-esjava" % elastic4sVersion,
+      "com.typesafe.akka"      %% "akka-stream-testkit"     % play.core.PlayVersion.akkaVersion % Test,
+      "org.scalactic"          %% "scalactic"               % "3.2.19",
+      specs2 % Test
+    )
+
+  )
+
+
lazy val cortexWithDeps = (project in file("target/docker-withdeps"))
.dependsOn(cortex)
.enablePlugins(DockerPlugin)
diff --git a/elastic4play/.drone.yml b/elastic4play/.drone.yml
new file mode 100644
index 000000000..ab76ca09b
--- /dev/null
+++ b/elastic4play/.drone.yml
@@ -0,0 +1,73 @@
+---
+kind: pipeline
+name: default
+type: docker
+
+steps:
+ # Restore cache of downloaded dependencies
+ - name: restore-cache
+ image: drillster/drone-volume-cache
+ settings:
+ restore: true
+ mount:
+ - .sbt
+ - .ivy2
+ volumes: [{name: cache, path: /cache}]
+
+ # Run project tests
+ - name: run-tests
+ image: thehiveproject/drone-scala-node
+ commands:
+ - sbt -Duser.home=$PWD test
+
+ # Publish package
+ - name: publish-package
+ image: thehiveproject/drone-scala-node
+ settings:
+ bintray_user: {from_secret: bintray_user}
+ bintray_key: {from_secret: bintray_key}
+ commands:
+ - mkdir -p .bintray
+ - echo realm = Bintray API Realm > .bintray/.credentials
+ - echo host = api.bintray.com >> .bintray/.credentials
+ - echo user = $PLUGIN_BINTRAY_USER >> .bintray/.credentials
+ - echo password = $PLUGIN_BINTRAY_KEY >> .bintray/.credentials
+ - sbt -Duser.home=$PWD publish
+ when:
+ event: [tag]
+
+ # Save external libraries in cache
+ - name: save-cache
+ image: drillster/drone-volume-cache
+ settings:
+ rebuild: true
+ mount:
+ - .sbt
+ - .ivy2
+ volumes: [{name: cache, path: /cache}]
+
+ - name: send message
+ image: thehiveproject/drone_keybase
+ settings:
+ username: {from_secret: keybase_username}
+ paperkey: {from_secret: keybase_paperkey}
+ channel: {from_secret: keybase_channel}
+ commands:
+ - |
+ keybase oneshot -u "$PLUGIN_USERNAME" --paperkey "$PLUGIN_PAPERKEY"
+ URL="$DRONE_SYSTEM_PROTO://$DRONE_SYSTEM_HOST/$DRONE_REPO/$DRONE_BUILD_NUMBER"
+ if [ $DRONE_BUILD_STATUS = "success" ]
+ then
+ keybase chat send "$PLUGIN_CHANNEL" ":white_check_mark: $DRONE_REPO: build succeeded $URL"
+ else
+ keybase chat send "$PLUGIN_CHANNEL" ":x: $DRONE_REPO: build failed $URL"
+ fi
+ when:
+ status:
+ - success
+ - failure
+
+volumes:
+ - name: cache
+ host:
+ path: /opt/drone/cache
diff --git a/elastic4play/.gitignore b/elastic4play/.gitignore
new file mode 100644
index 000000000..9cfdfecaf
--- /dev/null
+++ b/elastic4play/.gitignore
@@ -0,0 +1,37 @@
+logs
+!/bin/activator
+!/bin/activator.bat
+/bin/
+
+# sbt specific
+.cache
+.history
+.lib/
+.bsp/
+dist/*
+target/
+lib_managed/
+src_managed/
+project/boot/
+project/plugins/project/
+RUNNING_PID
+.cache-main
+.cache-tests
+
+# Eclipse
+.project
+.target
+.settings
+tmp
+.classpath
+
+# IntelliJ IDEA
+/.idea
+/*.iml
+/out
+/.idea_modules
+
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
diff --git a/elastic4play/.scalafmt.conf b/elastic4play/.scalafmt.conf
new file mode 100644
index 000000000..387904289
--- /dev/null
+++ b/elastic4play/.scalafmt.conf
@@ -0,0 +1,26 @@
+version = 2.3.2
+project.git = true
+align = more # For pretty alignment.
+assumeStandardLibraryStripMargin = true
+style = defaultWithAlign
+maxColumn = 150
+
+align.openParenCallSite = false
+align.openParenDefnSite = false
+newlines.alwaysBeforeTopLevelStatements = false
+rewrite.rules = [
+ RedundantBraces
+ RedundantParens
+ SortModifiers
+ PreferCurlyFors
+ SortImports
+]
+
+includeCurlyBraceInSelectChains = true
+includeNoParensInSelectChains = true
+
+rewriteTokens {
+ "⇒": "=>"
+ "←": "<-"
+ "→": "->"
+}
diff --git a/elastic4play/CHANGELOG.md b/elastic4play/CHANGELOG.md
new file mode 100644
index 000000000..b72e24b5d
--- /dev/null
+++ b/elastic4play/CHANGELOG.md
@@ -0,0 +1,387 @@
+# Change Log
+
+## [1.13.5](https://github.com/TheHive-Project/elastic4play/milestone/48) (2022-06-22)
+
+**Fixed bugs:**
+
+- Null values in objects are moved during json serialisation [\#106](https://github.com/TheHive-Project/elastic4play/issues/106)
+
+## [1.13.4](https://github.com/TheHive-Project/elastic4play/milestone/47) (2022-06-20)
+
+**Implemented enhancements:**
+
+- Update libraries [\#105](https://github.com/TheHive-Project/elastic4play/issues/105)
+
+## [1.13.3](https://github.com/TheHive-Project/elastic4play/milestone/46) (2021-11-05)
+
+**Fixed bugs:**
+
+- The jar is not compatible with java 8 [\#103](https://github.com/TheHive-Project/elastic4play/issues/103)
+
+## [1.13.2](https://github.com/TheHive-Project/elastic4play/milestone/45) (2021-10-29)
+
+**Closed issues:**
+
+- artifact is not available anymore in bintray [\#102](https://github.com/TheHive-Project/elastic4play/issues/102)
+
+## [1.13.1](https://github.com/TheHive-Project/elastic4play/milestone/44) (2021-02-19)
+
+**Fixed bugs:**
+
+- Update doesn't work on Elasticsearch 7.11 [\#101](https://github.com/TheHive-Project/elastic4play/issues/101)
+
+## [1.13.0](https://github.com/TheHive-Project/elastic4play/milestone/43) (2021-02-19)
+
+
+
+## [1.11.8](https://github.com/TheHive-Project/elastic4play/milestone/42) (2020-10-30)
+
+**Fixed bugs:**
+
+- Fix library conflicts [\#100](https://github.com/TheHive-Project/elastic4play/issues/100)
+
+## [1.11.7](https://github.com/TheHive-Project/elastic4play/milestone/41) (2020-10-29)
+
+**Fixed bugs:**
+
+- Update Playframework to fix vulnerabilities [\#99](https://github.com/TheHive-Project/elastic4play/issues/99)
+
+## [1.12.3](https://github.com/TheHive-Project/elastic4play/milestone/40) (2020-10-29)
+
+**Closed issues:**
+
+- Update Playframework to fix vulnerability [\#98](https://github.com/TheHive-Project/elastic4play/issues/98)
+
+## [1.12.2](https://github.com/TheHive-Project/elastic4play/milestone/38) (2020-10-26)
+
+**Fixed bugs:**
+
+- Use ES7 optimistic concurrency control [\#96](https://github.com/TheHive-Project/elastic4play/issues/96)
+
+## [1.11.6](https://github.com/TheHive-Project/elastic4play/milestone/39) (2020-10-26)
+
+**Closed issues:**
+
+- Use different queues for ES requests [\#97](https://github.com/TheHive-Project/elastic4play/issues/97)
+
+## [1.12.1](https://github.com/TheHive-Project/elastic4play/milestone/37) (2020-08-12)
+
+**Closed issues:**
+
+- Make sso authenticate method return an HTTP response [\#95](https://github.com/TheHive-Project/elastic4play/issues/95)
+
+## [1.12.0](https://github.com/TheHive-Project/elastic4play/milestone/36) (2020-08-11)
+
+**Implemented enhancements:**
+
+- Add support of ES7 [\#94](https://github.com/TheHive-Project/elastic4play/issues/94)
+
+**Fixed bugs:**
+
+- The size calculation of an empty attachment fails [\#93](https://github.com/TheHive-Project/elastic4play/issues/93)
+
+## [1.11.5](https://github.com/TheHive-Project/elastic4play/milestone/35) (2019-09-05)
+
+**Fixed bugs:**
+
+- Errors on streamed search are not correctly reported and induce timeouts [\#91](https://github.com/TheHive-Project/elastic4play/issues/91)
+- Replace groovy script by painless [\#92](https://github.com/TheHive-Project/elastic4play/issues/92)
+
+## [1.11.4](https://github.com/TheHive-Project/elastic4play/milestone/34) (2019-08-19)
+
+**Fixed bugs:**
+
+- Prevent the initial user to be stored in session [\#90](https://github.com/TheHive-Project/elastic4play/issues/90)
+
+## [1.11.3](https://github.com/TheHive-Project/elastic4play/milestone/33) (2019-07-03)
+
+**Implemented enhancements:**
+
+- Update PlayFramework to 2.6.23 [\#87](https://github.com/TheHive-Project/elastic4play/issues/87)
+- Add method to compute the size of attachment [\#89](https://github.com/TheHive-Project/elastic4play/issues/89)
+
+**Fixed bugs:**
+
+- Map _type field to join field [\#88](https://github.com/TheHive-Project/elastic4play/issues/88)
+
+## [1.11.2](https://github.com/TheHive-Project/elastic4play/milestone/32) (2019-05-27)
+
+**Fixed bugs:**
+
+- Update response is not parsed correctly [\#85](https://github.com/TheHive-Project/elastic4play/issues/85)
+
+## [1.11.1](https://github.com/TheHive-Project/elastic4play/milestone/31) (2019-05-20)
+
+**Fixed bugs:**
+
+- Query operator withParent doesn't work [\#83](https://github.com/TheHive-Project/elastic4play/issues/83)
+- Index creation fails on ElasticSearch 6 [\#84](https://github.com/TheHive-Project/elastic4play/issues/84)
+
+## [1.11.0](https://github.com/TheHive-Project/elastic4play/milestone/30) (2019-05-16)
+
+**Implemented enhancements:**
+
+- TCP Client to ES Deprecated - Switch to HTTP [\#34](https://github.com/TheHive-Project/elastic4play/issues/34)
+- Add support of ElasticSearch 6 [\#82](https://github.com/TheHive-Project/elastic4play/issues/82)
+
+## [1.10](https://github.com/TheHive-Project/elastic4play/milestone/29) (2019-03-19)
+
+**Implemented enhancements:**
+
+- Add attribute format for large raw data [\#80](https://github.com/TheHive-Project/elastic4play/issues/80)
+
+**Fixed bugs:**
+
+- Numeric field can't be searched with wildcard operator [\#81](https://github.com/TheHive-Project/elastic4play/issues/81)
+
+## [1.9.0](https://github.com/TheHive-Project/elastic4play/milestone/28) (2019-03-18)
+
+**Implemented enhancements:**
+
+- Add support of wildcard search [\#79](https://github.com/TheHive-Project/elastic4play/issues/79)
+
+## [1.8.0](https://github.com/TheHive-Project/elastic4play/milestone/27) (2019-03-07)
+
+**Implemented enhancements:**
+
+- Don't generate session cookie if API key is used for authentication [\#78](https://github.com/TheHive-Project/elastic4play/issues/78)
+
+## [1.7.2](https://github.com/TheHive-Project/elastic4play/milestone/26) (2019-02-05)
+
+**Implemented enhancements:**
+
+- Role doesn't implement toString [\#77](https://github.com/TheHive-Project/elastic4play/issues/77)
+
+**Fixed bugs:**
+
+- Order is inverted when extracting list of strings from Fields object [\#76](https://github.com/TheHive-Project/elastic4play/issues/76)
+
+## [1.7.1](https://github.com/TheHive-Project/elastic4play/milestone/25) (2018-11-29)
+
+**Implemented enhancements:**
+
+- Add configuration for drone continuous integration [\#74](https://github.com/TheHive-Project/elastic4play/issues/74)
+- Lowercase user ID coming from HTTP header [\#75](https://github.com/TheHive-Project/elastic4play/issues/75)
+
+## [1.7.0](https://github.com/TheHive-Project/elastic4play/milestone/24) (2018-11-28)
+
+**Implemented enhancements:**
+
+- Allow external authentication on a reverse proxy [\#73](https://github.com/TheHive-Project/elastic4play/issues/73)
+
+## [1.6.3](https://github.com/TheHive-Project/elastic4play/milestone/23) (2018-10-09)
+
+**Fixed bugs:**
+
+- AuxSrv doesn't use Json marshaller define in model [\#70](https://github.com/TheHive-Project/elastic4play/issues/70)
+- User name extracted from certificate is not correctly lowercased [\#71](https://github.com/TheHive-Project/elastic4play/issues/71)
+
+## [1.6.2](https://github.com/TheHive-Project/elastic4play/milestone/22) (2018-09-25)
+
+**Implemented enhancements:**
+
+- Add a better filter on attributes in AuxSrv [\#69](https://github.com/TheHive-Project/elastic4play/issues/69)
+
+**Fixed bugs:**
+
+- Make certificate field case insensitive [\#68](https://github.com/TheHive-Project/elastic4play/issues/68)
+
+## [1.6.1](https://github.com/TheHive-Project/elastic4play/milestone/21) (2018-08-27)
+
+**Implemented enhancements:**
+
+- Make SSL truststore configuration optional [\#64](https://github.com/TheHive-Project/elastic4play/issues/64)
+- X509 authentication: request certificate without requiring it [\#65](https://github.com/TheHive-Project/elastic4play/issues/65)
+
+**Fixed bugs:**
+
+- GroupByTime on nested fields doesn't work [\#66](https://github.com/TheHive-Project/elastic4play/issues/66)
+
+## [1.6.0](https://github.com/TheHive-Project/elastic4play/milestone/20) (2018-07-31)
+
+**Implemented enhancements:**
+
+- Elasticsearch secured by SearchGuard [\#53](https://github.com/TheHive-Project/elastic4play/issues/53)
+- New TheHive-Project repository [\#58](https://github.com/TheHive-Project/elastic4play/issues/58)
+
+**Fixed bugs:**
+
+- Temporary files cannot be created on Windows as their filename contains ":" [\#59](https://github.com/TheHive-Project/elastic4play/issues/59)
+- Entity rename in migration doesn't work [\#60](https://github.com/TheHive-Project/elastic4play/issues/60)
+- x.509 PKI - illegal object in getInstance: org.bouncycastle.asn1.DERTaggedObject [\#61](https://github.com/TheHive-Project/elastic4play/issues/61)
+- Session cookie expiration is not correctly checked [\#62](https://github.com/TheHive-Project/elastic4play/issues/62)
+- Race condition when an attachment is saved [\#63](https://github.com/TheHive-Project/elastic4play/issues/63)
+
+**Closed issues:**
+
+- Single Sign-On with X.509 certificates [\#26](https://github.com/TheHive-Project/elastic4play/issues/26)
+- SSL support [\#56](https://github.com/TheHive-Project/elastic4play/issues/56)
+
+## [1.5.0](https://github.com/TheHive-Project/elastic4play/milestone/15) (2018-03-29)
+
+**Implemented enhancements:**
+
+- Stream is not cluster ready [\#41](https://github.com/TheHive-Project/elastic4play/issues/41)
+- Get version of ElasticSearch cluster [\#51](https://github.com/TheHive-Project/elastic4play/issues/51)
+
+**Closed issues:**
+
+- OAuth2 Single Sign-on support [\#42](https://github.com/TheHive-Project/elastic4play/issues/42)
+- Cache result of get request [\#45](https://github.com/TheHive-Project/elastic4play/issues/45)
+- Add ability to provide multiple roles on controller helper [\#52](https://github.com/TheHive-Project/elastic4play/issues/52)
+
+## [1.4.6](https://github.com/TheHive-Project/elastic4play/milestone/19) (2018-03-29)
+
+**Implemented enhancements:**
+
+- Add the ability to remove datastore entry [\#54](https://github.com/TheHive-Project/elastic4play/issues/54)
+
+## [1.4.5](https://github.com/TheHive-Project/elastic4play/milestone/18) (2018-02-27)
+
+**Fixed bugs:**
+
+- Meta attributes are filtered when entities are converted to json [\#50](https://github.com/TheHive-Project/elastic4play/issues/50)
+
+## [1.4.4](https://github.com/TheHive-Project/elastic4play/milestone/17) (2018-02-08)
+
+**Fixed bugs:**
+
+- Version of document is not retrieved [\#49](https://github.com/TheHive-Project/elastic4play/issues/49)
+
+## [1.4.3](https://github.com/TheHive-Project/elastic4play/milestone/16) (2018-02-08)
+
+**Implemented enhancements:**
+
+- Manage concurrent updates [\#44](https://github.com/TheHive-Project/elastic4play/issues/44)
+- Make migration streams configurable [\#46](https://github.com/TheHive-Project/elastic4play/issues/46)
+- Add settings in index creation [\#47](https://github.com/TheHive-Project/elastic4play/issues/47)
+
+**Fixed bugs:**
+
+- getEntity of migration service doesn't use the right index [\#48](https://github.com/TheHive-Project/elastic4play/issues/48)
+
+## [1.4.2](https://github.com/TheHive-Project/elastic4play/milestone/14) (2018-01-04)
+
+**Pull requests:**
+
+- Add XPack authentication support [\#39](https://github.com/TheHive-Project/elastic4play/pull/39)
+
+## [1.4.1](https://github.com/TheHive-Project/elastic4play/milestone/13) (2017-12-15)
+
+**Fixed bugs:**
+
+- Error when configuring multiple ElasticSearch nodes [\#38](https://github.com/TheHive-Project/elastic4play/issues/38)
+
+## [1.4.0](https://github.com/TheHive-Project/elastic4play/milestone/11) (2017-12-05)
+
+**Implemented enhancements:**
+
+- Add the ability to describe attributes of an entity [\#32](https://github.com/TheHive-Project/elastic4play/issues/32)
+- Remove the deprecated "user" attribute [\#33](https://github.com/TheHive-Project/elastic4play/issues/33)
+- Add query inside aggregations [\#36](https://github.com/TheHive-Project/elastic4play/issues/36)
+
+**Fixed bugs:**
+
+- Query on numeric value doesn't work [\#37](https://github.com/TheHive-Project/elastic4play/issues/37)
+
+## [1.3.2](https://github.com/TheHive-Project/elastic4play/milestone/12) (2017-11-07)
+
+**Fixed bugs:**
+
+- Aggregation on sub-field doesn't work [\#35](https://github.com/TheHive-Project/elastic4play/issues/35)
+
+## [1.3.1](https://github.com/TheHive-Project/elastic4play/milestone/10) (2017-09-19)
+
+**Fixed bugs:**
+
+- Text attribute can't be aggregated nor sorted [\#31](https://github.com/TheHive-Project/elastic4play/issues/31)
+
+## [1.3.0](https://github.com/TheHive-Project/elastic4play/milestone/8) (2017-09-13)
+
+**Implemented enhancements:**
+
+- Add support of ElasticSearch 5 [\#11](https://github.com/TheHive-Project/elastic4play/issues/11)
+- Remove defined user roles [\#24](https://github.com/TheHive-Project/elastic4play/issues/24)
+- Add API key authentication type [\#25](https://github.com/TheHive-Project/elastic4play/issues/25)
+- Add configuration to disable authentication methods [\#28](https://github.com/TheHive-Project/elastic4play/issues/28)
+- Rename authentication type by authentication provider [\#29](https://github.com/TheHive-Project/elastic4play/issues/29)
+- Add method to query ElasticSearch cluster health [\#30](https://github.com/TheHive-Project/elastic4play/issues/30)
+
+**Fixed bugs:**
+
+- Handle search query error [\#27](https://github.com/TheHive-Project/elastic4play/issues/27)
+
+**Closed issues:**
+
+- Update Play to 2.6 and Scala to 2.12 [\#23](https://github.com/TheHive-Project/elastic4play/issues/23)
+
+## [1.2.1](https://github.com/TheHive-Project/elastic4play/milestone/9) (2017-08-14)
+
+**Fixed bugs:**
+
+- Typo on database check [\#22](https://github.com/TheHive-Project/elastic4play/issues/22)
+
+## [1.2.0](https://github.com/TheHive-Project/elastic4play/milestone/3) (2017-06-29)
+
+**Implemented enhancements:**
+
+- Add support of custom fields attribute [\#16](https://github.com/TheHive-Project/elastic4play/issues/16)
+- Support of attachment in subattribute [\#17](https://github.com/TheHive-Project/elastic4play/issues/17)
+- Save attachment from data in memory [\#18](https://github.com/TheHive-Project/elastic4play/issues/18)
+- Add method to update a dblist [\#19](https://github.com/TheHive-Project/elastic4play/issues/19)
+- Add an API to check if a dblist item exists [\#20](https://github.com/TheHive-Project/elastic4play/issues/20)
+- Add ability to create different document types in stream sink [\#21](https://github.com/TheHive-Project/elastic4play/issues/21)
+
+**Fixed bugs:**
+
+- Object attributes are not checked for mandatory subattributes [\#15](https://github.com/TheHive-Project/elastic4play/issues/15)
+
+## [1.1.5](https://github.com/TheHive-Project/elastic4play/milestone/6) (2017-05-11)
+
+**Implemented enhancements:**
+
+- Make index creation configurable [\#9](https://github.com/TheHive-Project/elastic4play/issues/9)
+
+**Fixed bugs:**
+
+- Offset is not taken into account if search uses scroll [\#12](https://github.com/TheHive-Project/elastic4play/issues/12)
+
+**Closed issues:**
+
+- Scala code cleanup [\#14](https://github.com/TheHive-Project/elastic4play/issues/14)
+
+## [1.1.4](https://github.com/TheHive-Project/elastic4play/milestone/5) (2017-05-11)
+
+**Implemented enhancements:**
+
+- Update playframework to 2.5.14 [\#13](https://github.com/TheHive-Project/elastic4play/issues/13)
+
+## [1.1.3](https://github.com/TheHive-Project/elastic4play/milestone/4) (2017-03-29)
+
+**Implemented enhancements:**
+
+- Permit to filter out unaudited attributes in AuxSrv [\#10](https://github.com/TheHive-Project/elastic4play/issues/10)
+
+**Fixed bugs:**
+
+- Invalidate DBList cache when it is updated [\#8](https://github.com/TheHive-Project/elastic4play/issues/8)
+
+## [1.1.2](https://github.com/TheHive-Project/elastic4play/milestone/2) (2017-02-21)
+
+**Implemented enhancements:**
+
+- [Refactoring] Make Fields methods more coherent [\#5](https://github.com/TheHive-Project/elastic4play/issues/5)
+- [Refactoring] Add global error handler [\#6](https://github.com/TheHive-Project/elastic4play/issues/6)
+- Change date format (ISO -> Timestamp) [\#7](https://github.com/TheHive-Project/elastic4play/issues/7)
+
+## [1.1.1](https://github.com/TheHive-Project/elastic4play/milestone/1) (2016-11-22)
+
+**Implemented enhancements:**
+
+- [Refactoring] Format Scala code in build process [\#2](https://github.com/TheHive-Project/elastic4play/issues/2)
+- [Feature] Add support of attachment input value [\#3](https://github.com/TheHive-Project/elastic4play/issues/3)
+
+**Fixed bugs:**
+
+- [Bug] Fix the build configuration file [\#1](https://github.com/TheHive-Project/elastic4play/issues/1)
+- [Bug] Prevent authentication module to indicate if user exists or not [\#4](https://github.com/TheHive-Project/elastic4play/issues/4)
diff --git a/elastic4play/LICENSE b/elastic4play/LICENSE
new file mode 100644
index 000000000..dbbe35581
--- /dev/null
+++ b/elastic4play/LICENSE
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/elastic4play/app/org/elastic4play/ErrorHandler.scala b/elastic4play/app/org/elastic4play/ErrorHandler.scala
new file mode 100644
index 000000000..1a6509531
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/ErrorHandler.scala
@@ -0,0 +1,63 @@
+package org.elastic4play
+
+import java.net.ConnectException
+
+import scala.concurrent.Future
+
+import play.api.Logger
+import play.api.http.{HttpErrorHandler, Status, Writeable}
+import play.api.libs.json.{JsNull, JsValue, Json}
+import play.api.mvc.{RequestHeader, ResponseHeader, Result, Results}
+
+import org.elastic4play.JsonFormat.attributeCheckingExceptionWrites
+
+/**
+ * This class handles errors. It traverses all causes of exception to find known error and shows the appropriate message
+ */
+class ErrorHandler extends HttpErrorHandler {
+
+ private[ErrorHandler] lazy val logger = Logger(getClass)
+
+ def onClientError(request: RequestHeader, statusCode: Int, message: String): Future[Result] = Future.successful {
+ Results.Status(statusCode)(s"A client error occurred on ${request.method} ${request.uri} : $message")
+ }
+
+ def toErrorResult(ex: Throwable): Option[(Int, JsValue)] =
+ ex match {
+ case AuthenticationError(message) => Some(Status.UNAUTHORIZED -> Json.obj("type" -> "AuthenticationError", "message" -> message))
+ case AuthorizationError(message) => Some(Status.FORBIDDEN -> Json.obj("type" -> "AuthorizationError", "message" -> message))
+ case UpdateError(_, message, attributes) =>
+ Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "UpdateError", "message" -> message, "object" -> attributes))
+ case InternalError(message) => Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "InternalError", "message" -> message))
+ case nfe: NumberFormatException =>
+ Some(Status.BAD_REQUEST -> Json.obj("type" -> "NumberFormatException", "message" -> ("Invalid format " + nfe.getMessage)))
+ case NotFoundError(message) => Some(Status.NOT_FOUND -> Json.obj("type" -> "NotFoundError", "message" -> message))
+ case BadRequestError(message) => Some(Status.BAD_REQUEST -> Json.obj("type" -> "BadRequest", "message" -> message))
+ case SearchError(message) => Some(Status.BAD_REQUEST -> Json.obj("type" -> "SearchError", "message" -> s"$message"))
+ case ace: AttributeCheckingError => Some(Status.BAD_REQUEST -> Json.toJson(ace))
+ case iae: IllegalArgumentException => Some(Status.BAD_REQUEST -> Json.obj("type" -> "IllegalArgument", "message" -> iae.getMessage))
+ case _: ConnectException =>
+ Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "NoNodeAvailable", "message" -> "ElasticSearch cluster is unreachable"))
+ case CreateError(_, message, attributes) =>
+ Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "CreateError", "message" -> message, "object" -> attributes))
+ case ErrorWithObject(tpe, message, obj) => Some(Status.BAD_REQUEST -> Json.obj("type" -> tpe, "message" -> message, "object" -> obj))
+ case GetError(message) => Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "GetError", "message" -> message))
+ case MultiError(message, exceptions) =>
+ val suberrors = exceptions.map(e => toErrorResult(e)).collect {
+ case Some((_, j)) => j
+ }
+ Some(Status.MULTI_STATUS -> Json.obj("type" -> "MultiError", "error" -> message, "suberrors" -> suberrors))
+ case IndexNotFoundException => Some(520 -> JsNull)
+ case t: Throwable => Option(t.getCause).flatMap(toErrorResult)
+ }
+
+ def toResult[C](status: Int, c: C)(implicit writeable: Writeable[C]): Result = Result(header = ResponseHeader(status), body = writeable.toEntity(c))
+
+ def onServerError(request: RequestHeader, exception: Throwable): Future[Result] = {
+ val (status, body) = toErrorResult(exception).getOrElse(
+ Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> exception.getClass.getName, "message" -> exception.getMessage)
+ )
+ logger.info(s"${request.method} ${request.uri} returned $status", exception)
+ Future.successful(toResult(status, body))
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/Errors.scala b/elastic4play/app/org/elastic4play/Errors.scala
new file mode 100644
index 000000000..6aa828418
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/Errors.scala
@@ -0,0 +1,52 @@
+package org.elastic4play
+
+import play.api.libs.json.{JsObject, JsValue}
+
+import org.elastic4play.controllers.InputValue
+
+class ErrorWithObject(message: String, val obj: JsObject) extends Exception(message)
+
+object ErrorWithObject {
+ def unapply(ewo: ErrorWithObject): Option[(String, String, JsObject)] = Some((ewo.getClass.getSimpleName, ewo.getMessage, ewo.obj))
+}
+
+case class BadRequestError(message: String) extends Exception(message)
+case class CreateError(status: Option[String], message: String, attributes: JsObject) extends ErrorWithObject(message, attributes)
+case class ConflictError(message: String, attributes: JsObject) extends ErrorWithObject(message, attributes)
+case class NotFoundError(message: String) extends Exception(message)
+case class GetError(message: String) extends Exception(message)
+case class UpdateError(status: Option[String], message: String, attributes: JsObject) extends ErrorWithObject(message, attributes)
+case class InternalError(message: String) extends Exception(message)
+case class SearchError(message: String) extends Exception(message)
+case class AuthenticationError(message: String) extends Exception(message)
+case class AuthorizationError(message: String) extends Exception(message)
+case class MultiError(message: String, exceptions: Seq[Exception])
+ extends Exception(message + exceptions.map(_.getMessage).mkString(" :\n\t- ", "\n\t- ", ""))
+case object IndexNotFoundException extends Exception
+
+case class AttributeCheckingError(tableName: String, errors: Seq[AttributeError] = Nil) extends Exception(errors.mkString("[", "][", "]")) {
+ override def toString: String = errors.mkString("[", "][", "]")
+}
+
+sealed trait AttributeError extends Throwable {
+ def withName(name: String): AttributeError
+ val name: String
+ override def getMessage: String = toString
+}
+
+case class InvalidFormatAttributeError(name: String, format: String, value: InputValue) extends AttributeError {
+ override def toString = s"Invalid format for $name: $value, expected $format"
+ override def withName(newName: String): AttributeError = copy(name = newName)
+}
+case class UnknownAttributeError(name: String, value: JsValue) extends AttributeError {
+ override def toString = s"Unknown attribute $name: $value"
+ override def withName(newName: String): AttributeError = copy(name = newName)
+}
+case class UpdateReadOnlyAttributeError(name: String) extends AttributeError {
+ override def toString = s"Attribute $name is read-only"
+ override def withName(newName: String): AttributeError = copy(name = newName)
+}
+case class MissingAttributeError(name: String) extends AttributeError {
+ override def toString = s"Attribute $name is missing"
+ override def withName(newName: String): AttributeError = copy(name = newName)
+}
diff --git a/elastic4play/app/org/elastic4play/JsonFormat.scala b/elastic4play/app/org/elastic4play/JsonFormat.scala
new file mode 100644
index 000000000..fe64d8230
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/JsonFormat.scala
@@ -0,0 +1,55 @@
+package org.elastic4play
+
+import java.util.Date
+
+import scala.util.{Failure, Success, Try}
+
+import play.api.libs.json._
+
+import org.elastic4play.controllers.JsonFormat.inputValueFormat
+
+object JsonFormat {
+ val datePattern = "yyyyMMdd'T'HHmmssZ"
+ private val dateReads: Reads[Date] = Reads.dateReads(datePattern).orElse(Reads.DefaultDateReads).orElse(Reads.LongReads.map(new Date(_)))
+ private val dateWrites: Writes[Date] = Writes[Date](d => JsNumber(d.getTime))
+ implicit val dateFormat: Format[Date] = Format(dateReads, dateWrites)
+
+ private val invalidFormatAttributeErrorWrites = Writes[InvalidFormatAttributeError] { ifae =>
+ Json.writes[InvalidFormatAttributeError].writes(ifae) +
+ ("type" -> JsString("InvalidFormatAttributeError")) +
+ ("message" -> JsString(ifae.toString))
+ }
+ private val unknownAttributeErrorWrites = Writes[UnknownAttributeError] { uae =>
+ Json.writes[UnknownAttributeError].writes(uae) +
+ ("type" -> JsString("UnknownAttributeError")) +
+ ("message" -> JsString(uae.toString))
+ }
+ private val updateReadOnlyAttributeErrorWrites = Writes[UpdateReadOnlyAttributeError] { uroae =>
+ Json.writes[UpdateReadOnlyAttributeError].writes(uroae) +
+ ("type" -> JsString("UpdateReadOnlyAttributeError")) +
+ ("message" -> JsString(uroae.toString))
+ }
+ private val missingAttributeErrorWrites = Writes[MissingAttributeError] { mae =>
+ Json.writes[MissingAttributeError].writes(mae) +
+ ("type" -> JsString("MissingAttributeError")) +
+ ("message" -> JsString(mae.toString))
+ }
+
+ implicit val attributeCheckingExceptionWrites: OWrites[AttributeCheckingError] = OWrites[AttributeCheckingError] { ace =>
+ Json.obj(
+ "tableName" -> ace.tableName,
+ "type" -> "AttributeCheckingError",
+ "errors" -> JsArray(ace.errors.map {
+ case e: InvalidFormatAttributeError => invalidFormatAttributeErrorWrites.writes(e)
+ case e: UnknownAttributeError => unknownAttributeErrorWrites.writes(e)
+ case e: UpdateReadOnlyAttributeError => updateReadOnlyAttributeErrorWrites.writes(e)
+ case e: MissingAttributeError => missingAttributeErrorWrites.writes(e)
+ })
+ )
+ }
+
+ implicit def tryWrites[A](implicit aWrites: Writes[A]): Writes[Try[A]] = Writes[Try[A]] {
+ case Success(a) => aWrites.writes(a)
+ case Failure(t) => JsString(t.getMessage)
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/Timed.java b/elastic4play/app/org/elastic4play/Timed.java
new file mode 100644
index 000000000..f5ba39e21
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/Timed.java
@@ -0,0 +1,12 @@
+package org.elastic4play;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+public @interface Timed {
+ String value() default "";
+}
\ No newline at end of file
diff --git a/elastic4play/app/org/elastic4play/controllers/Authenticated.scala b/elastic4play/app/org/elastic4play/controllers/Authenticated.scala
new file mode 100644
index 000000000..6b802c327
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/controllers/Authenticated.scala
@@ -0,0 +1,202 @@
+package org.elastic4play.controllers
+
+import org.elastic4play.services.{AuthContext, AuthSrv, Role, UserSrv}
+import org.elastic4play.utils.Instance
+import org.elastic4play.{AuthenticationError, AuthorizationError}
+import play.api.http.HeaderNames
+import play.api.mvc._
+import play.api.{Configuration, Logger}
+
+import java.util.Date
+import javax.inject.{Inject, Singleton}
+import scala.concurrent.duration.{DurationLong, FiniteDuration}
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Try
+
+/**
+ * A request with authentication information
+ */
+class AuthenticatedRequest[A](val authContext: AuthContext, request: Request[A]) extends WrappedRequest[A](request) with AuthContext {
+ def userId: String = authContext.userId
+ def userName: String = authContext.userName
+ def requestId: String = Instance.getRequestId(request)
+ def roles: Seq[Role] = authContext.roles
+ def authMethod: String = authContext.authMethod
+}
+
+sealed trait ExpirationStatus
+case class ExpirationOk(duration: FiniteDuration) extends ExpirationStatus
+case class ExpirationWarning(duration: FiniteDuration) extends ExpirationStatus
+case object ExpirationError extends ExpirationStatus
+
+/**
+ * Check and manager user security (authentication and authorization)
+ */
+@Singleton
+class Authenticated(
+ maxSessionInactivity: FiniteDuration,
+ sessionWarning: FiniteDuration,
+ sessionUsername: String,
+ authHeaderName: Option[String],
+ authBySessionCookie: Boolean,
+ authByKey: Boolean,
+ authByBasicAuth: Boolean,
+ authByInitialUser: Boolean,
+ authByHeader: Boolean,
+ userSrv: UserSrv,
+ authSrv: AuthSrv,
+ defaultParser: BodyParsers.Default,
+ implicit val ec: ExecutionContext
+) {
+
+ @Inject() def this(configuration: Configuration, userSrv: UserSrv, authSrv: AuthSrv, defaultParser: BodyParsers.Default, ec: ExecutionContext) =
+ this(
+ configuration.getMillis("session.inactivity").millis,
+ configuration.getMillis("session.warning").millis,
+ configuration.getOptional[String]("session.username").getOrElse("username"),
+ configuration.getOptional[String]("auth.header.name"),
+ configuration.getOptional[Boolean]("auth.method.session").getOrElse(true),
+ configuration.getOptional[Boolean]("auth.method.key").getOrElse(true),
+ configuration.getOptional[Boolean]("auth.method.basic").getOrElse(true),
+ configuration.getOptional[Boolean]("auth.method.init").getOrElse(true),
+ configuration.getOptional[Boolean]("auth.method.header").getOrElse(false),
+ userSrv,
+ authSrv,
+ defaultParser,
+ ec
+ )
+
+ private[Authenticated] lazy val logger = Logger(getClass)
+
+ private def now = (new Date).getTime
+
+ /**
+ * Insert or update session cookie containing user name and session expiration timestamp
+ * Cookie is signed by Play framework (it cannot be modified by user)
+ */
+ def setSessingUser(result: Result, authContext: AuthContext)(implicit request: RequestHeader): Result =
+ if (authContext.authMethod != "key" && authContext.authMethod != "init")
+ result.addingToSession(
+ sessionUsername -> authContext.userId,
+ "expire" -> (now + maxSessionInactivity.toMillis).toString,
+ "authMethod" -> authContext.authMethod
+ )
+ else
+ result
+
+ /**
+ * Retrieve authentication information form cookie
+ */
+ def getFromSession(request: RequestHeader): Future[AuthContext] = {
+ val authContext = for {
+ userId <- request.session.get(sessionUsername).toRight(AuthenticationError("User session not found"))
+ authMethod <- request.session.get("authMethod").toRight(AuthenticationError("Authentication method not found in session"))
+ _ <- if (expirationStatus(request) != ExpirationError) Right(()) else Left(AuthenticationError("User session has expired"))
+ ctx = userSrv.getFromId(request, userId, authMethod)
+ } yield ctx
+ authContext.fold(authError => Future.failed[AuthContext](authError), identity)
+ }
+
+ def expirationStatus(request: RequestHeader): ExpirationStatus =
+ request
+ .session
+ .get("expire")
+ .flatMap { expireStr =>
+ Try(expireStr.toLong).toOption
+ }
+ .map { expire =>
+ (expire - now).millis
+ }
+ .map {
+ case duration if duration.length < 0 => ExpirationError
+ case duration if duration < sessionWarning => ExpirationWarning(duration)
+ case duration => ExpirationOk(duration)
+ }
+ .getOrElse(ExpirationError)
+
+ /**
+ * Retrieve authentication information from API key
+ */
+ def getFromApiKey(request: RequestHeader): Future[AuthContext] =
+ for {
+ auth <- request
+ .headers
+ .get(HeaderNames.AUTHORIZATION)
+ .fold(Future.failed[String](AuthenticationError("Authentication header not found")))(Future.successful)
+ _ <- if (!auth.startsWith("Bearer ")) Future.failed(AuthenticationError("Only bearer authentication is supported")) else Future.successful(())
+ key = auth.substring(7)
+ authContext <- authSrv.authenticate(key)(request)
+ } yield authContext
+
+ def getFromBasicAuth(request: RequestHeader): Future[AuthContext] =
+ for {
+ auth <- request
+ .headers
+ .get(HeaderNames.AUTHORIZATION)
+ .fold(Future.failed[String](AuthenticationError("Authentication header not found")))(Future.successful)
+ _ <- if (!auth.startsWith("Basic ")) Future.failed(AuthenticationError("Only basic authentication is supported")) else Future.successful(())
+ authWithoutBasic = auth.substring(6)
+ decodedAuth = new String(java.util.Base64.getDecoder.decode(authWithoutBasic), "UTF-8")
+ authContext <- decodedAuth.split(":") match {
+ case Array(username, password) => authSrv.authenticate(username, password)(request)
+ case _ => Future.failed(AuthenticationError("Can't decode authentication header"))
+ }
+ } yield authContext
+
+ def getFromHeader(request: RequestHeader): Future[AuthContext] =
+ for {
+ header <- authHeaderName.fold[Future[String]](Future.failed(AuthenticationError("HTTP header is not configured")))(Future.successful)
+ username <- request.headers.get(header).fold[Future[String]](Future.failed(AuthenticationError("HTTP header is not set")))(Future.successful)
+ user <- userSrv.getFromId(request, username.toLowerCase, "header")
+ } yield user
+
+ val authenticationMethods: Seq[(String, RequestHeader => Future[AuthContext])] =
+ (if (authBySessionCookie) Seq("session" -> getFromSession _) else Nil) ++
+ (if (authByKey) Seq("key" -> getFromApiKey _) else Nil) ++
+ (if (authByBasicAuth) Seq("basic" -> getFromBasicAuth _) else Nil) ++
+ (if (authByInitialUser) Seq("init" -> userSrv.getInitialUser _) else Nil) ++
+ (if (authByHeader) Seq("header" -> getFromHeader _) else Nil)
+
+ def getContext(request: RequestHeader): Future[AuthContext] =
+ authenticationMethods
+ .foldLeft[Future[Either[Seq[(String, Throwable)], AuthContext]]](Future.successful(Left(Nil))) {
+ case (acc, (authMethodName, authMethod)) =>
+ acc.flatMap {
+ case authContext @ Right(_) => Future.successful(authContext)
+ case Left(errors) =>
+ authMethod(request)
+ .map(authContext => Right(authContext))
+ .recover { case error => Left(errors :+ (authMethodName -> error)) }
+ }
+ }
+ .flatMap {
+ case Right(authContext) => Future.successful(authContext)
+ case Left(errors) =>
+ val errorDetails = errors
+ .map { case (authMethodName, error) => s"\t$authMethodName: ${error.getClass.getSimpleName} ${error.getMessage}" }
+ .mkString("\n")
+ logger.error(s"Authentication failure:\n$errorDetails")
+ Future.failed(AuthenticationError("Authentication failure"))
+ }
+
+ /**
+ * Create an action for authenticated controller
+ * If user has sufficient right (have required role) action is executed
+ * otherwise, action returns a not authorized error
+ */
+ def apply(requiredRole: Role*): ActionBuilder[AuthenticatedRequest, AnyContent] =
+ new ActionBuilder[AuthenticatedRequest, AnyContent] {
+ val executionContext: ExecutionContext = ec
+
+ def parser: BodyParser[AnyContent] = defaultParser
+
+ def invokeBlock[A](request: Request[A], block: AuthenticatedRequest[A] => Future[Result]): Future[Result] =
+ getContext(request).flatMap { authContext =>
+ if (requiredRole.isEmpty || requiredRole.toSet.intersect(authContext.roles.toSet).nonEmpty)
+ block(new AuthenticatedRequest(authContext, request))
+ .map(result => setSessingUser(result, authContext)(request))
+ else
+ Future.failed(AuthorizationError(s"Insufficient rights to perform this action"))
+ }
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/controllers/Fields.scala b/elastic4play/app/org/elastic4play/controllers/Fields.scala
new file mode 100644
index 000000000..b6296db40
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/controllers/Fields.scala
@@ -0,0 +1,265 @@
+package org.elastic4play.controllers
+
+import akka.util.ByteString
+import org.elastic4play.BadRequestError
+import org.elastic4play.controllers.JsonFormat.{fieldsReader, pathFormat}
+import org.elastic4play.services.Attachment
+import org.elastic4play.utils.Hash
+import play.api.Logger
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json._
+import play.api.libs.streams.Accumulator
+import play.api.mvc._
+
+import java.nio.file.Path
+import java.util.Locale
+import javax.inject.Inject
+import scala.collection.immutable
+import scala.concurrent.ExecutionContext
+import scala.util.Try
+
+/**
+ * Define a data value from an HTTP request. It can be a simple string, json, file or null (maybe xml in the future)
+ */
+sealed trait InputValue {
+ def jsonValue: JsValue
+}
+
+/**
+ * Define a data value from HTTP request as simple string
+ */
+case class StringInputValue(data: Seq[String]) extends InputValue {
+ def jsonValue: JsValue = Json.toJson(data)
+}
+
+object StringInputValue {
+ def apply(s: String): StringInputValue = this(Seq(s))
+}
+
+/**
+ * Define a data value from HTTP request as json value
+ */
+case class JsonInputValue(data: JsValue) extends InputValue {
+ def jsonValue: JsValue = data
+}
+
+/**
+ * Define a data value from HTTP request as file (filename, path to temporary file and content type). Other data are lost
+ */
+case class FileInputValue(name: String, filepath: Path, contentType: String) extends InputValue {
+ def jsonValue: JsObject = Json.obj("name" -> name, "filepath" -> filepath, "contentType" -> contentType)
+}
+
+/**
+ * Define an attachment that is already in the datastore. This type can't come from an HTTP request.
+ */
+case class AttachmentInputValue(name: String, hashes: Seq[Hash], size: Long, contentType: String, id: String) extends InputValue {
+ def jsonValue: JsObject = Json.obj("name" -> name, "hashes" -> hashes.map(_.toString()), "size" -> size, "contentType" -> contentType, "id" -> id)
+ def toAttachment: Attachment = Attachment(name, hashes, size, contentType, id)
+}
+
+object AttachmentInputValue {
+
+ def apply(attachment: Attachment) =
+ new AttachmentInputValue(attachment.name, attachment.hashes, attachment.size, attachment.contentType, attachment.id)
+}
+
+/**
+ * Define a data value from HTTP request as null (empty value)
+ */
+object NullInputValue extends InputValue {
+ def jsonValue: JsValue = JsNull
+}
+
+/**
+ * Contain data values from HTTP request
+ */
+class Fields(private val fields: Map[String, InputValue]) {
+
+ /**
+ * Get InputValue
+ */
+ def get(name: String): Option[InputValue] =
+ fields.get(name)
+
+ /**
+ * Get data value as String. Returns None if field doesn't exist or format is not a string
+ */
+ def getString(name: String): Option[String] =
+ fields.get(name) collect {
+ case StringInputValue(Seq(s)) => s
+ case JsonInputValue(JsString(s)) => s
+ }
+
+ /**
+ * Get data value as list of String. Returns None if field doesn't exist or format is not a list of string
+ */
+ def getStrings(name: String): Option[Seq[String]] = fields.get(name) flatMap {
+ case StringInputValue(ss) => Some(ss)
+ case JsonInputValue(js: JsArray) => js.asOpt[Seq[String]]
+ case _ => None
+ }
+
+ /**
+ * Get data value as a list of String, splitting each value on the separator. Returns None if the field doesn't exist or format is not a list of string
+ */
+ def getStrings(name: String, separator: String): Option[Seq[String]] = fields.get(name) flatMap {
+ case StringInputValue(ss) => Some(ss.flatMap(_.split(separator)).filterNot(_.isEmpty))
+ case JsonInputValue(js: JsArray) => js.asOpt[Seq[String]]
+ case _ => None
+ }
+
+ /**
+ * Get data value as Long. Returns None if field doesn't exist or format is not a Long
+ */
+ def getLong(name: String): Option[Long] = fields.get(name) flatMap {
+ case StringInputValue(Seq(s)) => Try(s.toLong).toOption
+ case JsonInputValue(JsNumber(b)) => Some(b.longValue)
+ case _ => None
+ }
+
+ def getBoolean(name: String): Option[Boolean] = fields.get(name) flatMap {
+ case JsonInputValue(JsBoolean(b)) => Some(b)
+ case StringInputValue(Seq(s)) => Try(s.toBoolean).orElse(Try(s.toLong == 1)).toOption
+ case _ => None
+ }
+
+ /**
+ * Get data value as json. Returns None if field doesn't exist or can't be converted to json
+ */
+ def getValue(name: String): Option[JsValue] = fields.get(name) collect {
+ case JsonInputValue(js) => js
+ case StringInputValue(Seq(s)) => JsString(s)
+ case StringInputValue(ss) => Json.toJson(ss)
+ }
+
+ def getValues(name: String): Seq[JsValue] = fields.get(name).toSeq flatMap {
+ case JsonInputValue(JsArray(js)) => js
+ case StringInputValue(ss) => ss.map(s => JsString(s))
+ case _ => Nil
+ }
+
+ /**
+ * Extract all fields, name and value
+ */
+ def map[A](f: ((String, InputValue)) => A): immutable.Iterable[A] = fields.map(f)
+
+ /**
+ * Extract all field values
+ */
+ def mapValues(f: InputValue => InputValue) = new Fields(fields.view.mapValues(f).toMap)
+
+ /**
+ * Returns a copy of this class with a new field (or replacing existing field)
+ */
+ def set(name: String, value: InputValue): Fields = new Fields(fields + (name -> value))
+
+ /**
+ * Returns a copy of this class with a new field (or replacing existing field)
+ */
+ def set(name: String, value: String): Fields = set(name, StringInputValue(Seq(value)))
+
+ /**
+ * Returns a copy of this class with a new field (or replacing existing field)
+ */
+ def set(name: String, value: JsValue): Fields = set(name, JsonInputValue(value))
+
+ /**
+ * Returns a copy of this class with a new field if value is not None otherwise returns this
+ */
+ def set(name: String, value: Option[JsValue]): Fields = value.fold(this)(v => set(name, v))
+
+ /**
+ * Return a copy of this class without the specified field
+ */
+ def unset(name: String): Fields = new Fields(fields - name)
+
+ /**
+ * Returns true if the specified field name is present
+ */
+ def contains(name: String): Boolean = fields.contains(name)
+
+ def isEmpty: Boolean = fields.isEmpty
+
+ def addIfAbsent(name: String, value: String): Fields = getString(name).fold(set(name, value))(_ => this)
+
+ def addIfAbsent(name: String, value: JsValue): Fields = getValue(name).fold(set(name, value))(_ => this)
+
+ def addIfAbsent(name: String, value: InputValue): Fields = get(name).fold(set(name, value))(_ => this)
+
+ def ++(other: IterableOnce[(String, InputValue)]) = new Fields(fields ++ other)
+
+ override def toString: String = fields.toString()
+}
+
+object Fields {
+ val empty: Fields = new Fields(Map.empty[String, InputValue])
+
+ /**
+ * Create an instance of Fields from a JSON object
+ */
+ def apply(obj: JsObject): Fields = {
+ val fields = obj.value.view.mapValues(v => JsonInputValue(v)).toMap
+ new Fields(fields)
+ }
+
+ def apply(fields: Map[String, InputValue]): Fields = {
+ if (fields.keysIterator.exists(_.startsWith("_")))
+ throw BadRequestError("Field starting with '_' is forbidden")
+ new Fields(fields)
+ }
+}
+
+class FieldsBodyParser @Inject() (playBodyParsers: PlayBodyParsers, implicit val ec: ExecutionContext) extends BodyParser[Fields] {
+
+ private[FieldsBodyParser] lazy val logger = Logger(getClass)
+
+ def apply(request: RequestHeader): Accumulator[ByteString, Either[Result, Fields]] = {
+ def queryFields = request.queryString.view.mapValues(v => StringInputValue(v)).toMap
+
+ request.contentType.map(_.toLowerCase(Locale.ENGLISH)) match {
+
+ case Some("text/json") | Some("application/json") => playBodyParsers.json[Fields].map(f => f ++ queryFields).apply(request)
+
+ case Some("application/x-www-form-urlencoded") =>
+ playBodyParsers
+ .tolerantFormUrlEncoded
+ .map { form =>
+ Fields(form.view.mapValues(v => StringInputValue(v)).toMap)
+ }
+ .map(f => f ++ queryFields)
+ .apply(request)
+
+ case Some("multipart/form-data") =>
+ playBodyParsers
+ .multipartFormData
+ .map {
+ case MultipartFormData(dataParts, files, _) =>
+ val dataFields = dataParts
+ .getOrElse("_json", Nil)
+ .headOption
+ .map { s =>
+ Json
+ .parse(s)
+ .as[JsObject]
+ .value
+ .view
+ .mapValues(v => JsonInputValue(v))
+ .toMap
+ }
+ .getOrElse(Map.empty)
+ val fileFields = files.map { f =>
+ f.key -> FileInputValue(f.filename.split("[/\\\\]").last, f.ref.path, f.contentType.getOrElse("application/octet-stream"))
+ }
+ Fields(dataFields ++ fileFields ++ queryFields)
+ }
+ .apply(request)
+
+ case contentType =>
+ val contentLength = request.headers.get("Content-Length").fold(0)(_.toInt)
+ if (contentLength != 0)
+ logger.warn(s"Unrecognized content-type : ${contentType.getOrElse("not set")} on $request (length=$contentLength)")
+ Accumulator.done(Right(Fields(queryFields)))
+ }
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/controllers/JsonFormat.scala b/elastic4play/app/org/elastic4play/controllers/JsonFormat.scala
new file mode 100644
index 000000000..bfdce4fa5
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/controllers/JsonFormat.scala
@@ -0,0 +1,82 @@
+package org.elastic4play.controllers
+
+import java.io.File
+import java.nio.file.{Path, Paths}
+
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json._
+
+import org.elastic4play.utils.Hash
+
+object JsonFormat {
+
+ private val fileReads = Reads[File] { json =>
+ json.validate[String].map(filepath => new File(filepath))
+ }
+ private val fileWrites = Writes[File]((file: File) => JsString(file.getAbsolutePath))
+ implicit val fileFormat: Format[File] = Format[File](fileReads, fileWrites)
+
+ private val pathReads = Reads[Path] { json =>
+ json.validate[String].map(filepath => Paths.get(filepath))
+ }
+ private val pathWrites = Writes[Path]((path: Path) => JsString(path.toString))
+ implicit val pathFormat: Format[Path] = Format[Path](pathReads, pathWrites)
+
+ private val fileInputValueWrites = Writes[FileInputValue] { (fiv: FileInputValue) =>
+ fiv.jsonValue + ("type" -> JsString("FileInputValue"))
+ }
+ private val stringInputValueReads = Reads[StringInputValue] { json =>
+ (json \ "value").validate[Seq[String]].map(s => StringInputValue(s))
+ }
+ private val jsonInputValueReads = Reads[JsonInputValue] { json =>
+ (json \ "value").validate[JsValue].map(v => JsonInputValue(v))
+ }
+ private val fileInputValueReads = Reads[FileInputValue] { json =>
+ for {
+ name <- (json \ "name").validate[String]
+ filepath <- (json \ "filepath").validate[Path]
+ contentType <- (json \ "contentType").validate[String]
+ } yield FileInputValue(name, filepath, contentType)
+ }
+
+ val attachmentInputValueReads: Reads[AttachmentInputValue] = Reads { json =>
+ for {
+ name <- (json \ "name").validate[String]
+ hashes <- (json \ "hashes").validate[Seq[String]]
+ size <- (json \ "size").validate[Long]
+ contentType <- (json \ "contentType").validate[String]
+ id <- (json \ "id").validate[String]
+ } yield AttachmentInputValue(name, hashes.map(Hash.apply), size, contentType, id)
+ }
+
+ private val inputValueWrites = Writes[InputValue]((value: InputValue) =>
+ value match {
+ case v: StringInputValue => Json.obj("type" -> "StringInputValue", "value" -> v.jsonValue)
+ case v: JsonInputValue => Json.obj("type" -> "JsonInputValue", "value" -> v.jsonValue)
+ case v: FileInputValue => Json.obj("type" -> "FileInputValue", "value" -> v.jsonValue)
+ case v: AttachmentInputValue => Json.obj("type" -> "AttachmentInputValue", "value" -> v.jsonValue)
+ case NullInputValue => Json.obj("type" -> "NullInputValue")
+ }
+ )
+
+ private val inputValueReads = Reads { json =>
+ (json \ "type").validate[String].flatMap {
+ case "StringInputValue" => (json \ "value").validate(stringInputValueReads)
+ case "JsonInputValue" => (json \ "value").validate(jsonInputValueReads)
+ case "FileInputValue" => (json \ "value").validate(fileInputValueReads)
+ case "AttachmentInputValue" => (json \ "value").validate(attachmentInputValueReads)
+ case "NullInputValue" => JsSuccess(NullInputValue)
+ }
+ }
+
+ implicit val fileInputValueFormat: Format[FileInputValue] = Format[FileInputValue](fileInputValueReads, fileInputValueWrites)
+
+ implicit val inputValueFormat: Format[InputValue] = Format[InputValue](inputValueReads, inputValueWrites)
+
+ implicit val fieldsReader: Reads[Fields] = Reads {
+ case json: JsObject => JsSuccess(Fields(json))
+ case _ => JsError("Expecting JSON object body")
+ }
+
+}
diff --git a/elastic4play/app/org/elastic4play/controllers/MigrationCtrl.scala b/elastic4play/app/org/elastic4play/controllers/MigrationCtrl.scala
new file mode 100644
index 000000000..ffde9926f
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/controllers/MigrationCtrl.scala
@@ -0,0 +1,23 @@
+package org.elastic4play.controllers
+
+import javax.inject.{Inject, Singleton}
+
+import scala.concurrent.ExecutionContext
+
+import play.api.mvc._
+
+import org.elastic4play.Timed
+import org.elastic4play.services.MigrationSrv
+
+/**
+ * Migration controller : start migration process
+ */
+@Singleton
+class MigrationCtrl @Inject() (migrationSrv: MigrationSrv, components: ControllerComponents, implicit val ec: ExecutionContext)
+ extends AbstractController(components) {
+
+ @Timed("controllers.MigrationCtrl.migrate")
+ def migrate: Action[AnyContent] = Action.async {
+ migrationSrv.migrate.map(_ => NoContent)
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/controllers/Renderer.scala b/elastic4play/app/org/elastic4play/controllers/Renderer.scala
new file mode 100644
index 000000000..277fe723f
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/controllers/Renderer.scala
@@ -0,0 +1,55 @@
+package org.elastic4play.controllers
+
+import javax.inject.Inject
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success, Try}
+
+import play.api.http.Status
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json.{JsValue, Json, Writes}
+import play.api.mvc.{Result, Results}
+
+import akka.stream.Materializer
+import akka.stream.scaladsl.Source
+
+import org.elastic4play.ErrorHandler
+
+class Renderer @Inject() (errorHandler: ErrorHandler, implicit val ec: ExecutionContext, implicit val mat: Materializer) {
+
+ def toMultiOutput[A](status: Int, objects: Seq[Try[A]])(implicit writes: Writes[A]): Result = {
+
+ val (success, failure) = objects.foldLeft((Seq.empty[JsValue], Seq.empty[JsValue])) {
+ case ((artifacts, errors), Success(a)) => (Json.toJson(a) +: artifacts, errors)
+ case ((artifacts, errors), Failure(e)) =>
+ val errorJson = errorHandler.toErrorResult(e) match {
+ case Some((_, j)) => j
+ case None => Json.obj("type" -> e.getClass.getName, "error" -> e.getMessage)
+ }
+ (artifacts, errorJson +: errors)
+
+ }
+ if (failure.isEmpty)
+ toOutput(status, success)
+ else if (success.isEmpty)
+ toOutput(Status.BAD_REQUEST, failure)
+ else
+ toOutput(Status.MULTI_STATUS, Json.obj("success" -> success, "failure" -> failure))
+ }
+
+ def toOutput[C](status: Int, content: C)(implicit writes: Writes[C]): Result = {
+ val json = Json.toJson(content)
+ val s = new Results.Status(status)
+ s(json)
+ }
+
+ def toOutput[C](status: Int, src: Source[C, _], total: Future[Long])(implicit writes: Writes[C]): Future[Result] = {
+ val stringSource = src.map(s => Json.toJson(s).toString).intersperse("[", ",", "]")
+ total.map { t =>
+ new Results.Status(status)
+ .chunked(stringSource)
+ .as("application/json")
+ .withHeaders("X-Total" -> t.toString)
+ }
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBConfiguration.scala b/elastic4play/app/org/elastic4play/database/DBConfiguration.scala
new file mode 100644
index 000000000..2b7ab4875
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBConfiguration.scala
@@ -0,0 +1,225 @@
+package org.elastic4play.database
+
+import java.nio.file.{Files, Paths}
+import java.security.KeyStore
+
+import akka.NotUsed
+import akka.actor.ActorSystem
+import akka.stream.scaladsl.{Sink, Source}
+import com.sksamuel.elastic4s._
+import com.sksamuel.elastic4s.http.JavaClient
+import com.sksamuel.elastic4s.requests.bulk.BulkResponseItem
+import com.sksamuel.elastic4s.requests.searches.{SearchHit, SearchRequest}
+import com.sksamuel.elastic4s.streams.ReactiveElastic.ReactiveElastic
+import com.sksamuel.elastic4s.streams.{RequestBuilder, ResponseListener}
+import javax.inject.{Inject, Named, Singleton}
+import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
+import org.apache.http.auth.{AuthScope, UsernamePasswordCredentials}
+import org.apache.http.client.CredentialsProvider
+import org.apache.http.client.config.RequestConfig
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+import org.elastic4play.{ConflictError, IndexNotFoundException, InternalError, SearchError}
+import org.elasticsearch.client.RestClientBuilder.{HttpClientConfigCallback, RequestConfigCallback}
+import play.api.inject.ApplicationLifecycle
+import play.api.libs.json.JsObject
+import play.api.{Configuration, Logger}
+
+import scala.jdk.CollectionConverters._
+import scala.concurrent.duration.DurationInt
+import scala.concurrent.{ExecutionContext, Future, Promise}
+
+/**
+ * This class is a wrapper of ElasticSearch client from Elastic4s
+ * It builds the client using configuration (ElasticSearch addresses, cluster and index name)
+ * It adds a timed annotation in order to measure storage metrics
+ */
+@Singleton
+class DBConfiguration @Inject() (
+ config: Configuration,
+ lifecycle: ApplicationLifecycle,
+ @Named("databaseVersion") val version: Int,
+ implicit val actorSystem: ActorSystem
+) {
+ private[DBConfiguration] lazy val logger = Logger(getClass)
+
+ def requestConfigCallback: RequestConfigCallback = (requestConfigBuilder: RequestConfig.Builder) => {
+ requestConfigBuilder.setAuthenticationEnabled(credentialsProviderMaybe.isDefined)
+ config.getOptional[Boolean]("search.circularRedirectsAllowed").foreach(requestConfigBuilder.setCircularRedirectsAllowed)
+ config.getOptional[Int]("search.connectionRequestTimeout").foreach(requestConfigBuilder.setConnectionRequestTimeout)
+ config.getOptional[Int]("search.connectTimeout").foreach(requestConfigBuilder.setConnectTimeout)
+ config.getOptional[Boolean]("search.contentCompressionEnabled").foreach(requestConfigBuilder.setContentCompressionEnabled)
+ config.getOptional[String]("search.cookieSpec").foreach(requestConfigBuilder.setCookieSpec)
+ config.getOptional[Boolean]("search.expectContinueEnabled").foreach(requestConfigBuilder.setExpectContinueEnabled)
+ // config.getOptional[InetAddress]("search.localAddress").foreach(requestConfigBuilder.setLocalAddress)
+ config.getOptional[Int]("search.maxRedirects").foreach(requestConfigBuilder.setMaxRedirects)
+ // config.getOptional[Boolean]("search.proxy").foreach(requestConfigBuilder.setProxy)
+ config.getOptional[Seq[String]]("search.proxyPreferredAuthSchemes").foreach(v => requestConfigBuilder.setProxyPreferredAuthSchemes(v.asJava))
+ config.getOptional[Boolean]("search.redirectsEnabled").foreach(requestConfigBuilder.setRedirectsEnabled)
+ config.getOptional[Boolean]("search.relativeRedirectsAllowed").foreach(requestConfigBuilder.setRelativeRedirectsAllowed)
+ config.getOptional[Int]("search.socketTimeout").foreach(requestConfigBuilder.setSocketTimeout)
+ config.getOptional[Seq[String]]("search.targetPreferredAuthSchemes").foreach(v => requestConfigBuilder.setTargetPreferredAuthSchemes(v.asJava))
+ requestConfigBuilder
+ }
+
+ lazy val credentialsProviderMaybe: Option[CredentialsProvider] =
+ for {
+ user <- config.getOptional[String]("search.user")
+ password <- config.getOptional[String]("search.password")
+ } yield {
+ val provider = new BasicCredentialsProvider
+ val credentials = new UsernamePasswordCredentials(user, password)
+ provider.setCredentials(AuthScope.ANY, credentials)
+ provider
+ }
+
+ lazy val sslContextMaybe: Option[SSLContext] = config.getOptional[String]("search.keyStore.path").map { keyStore =>
+ val keyStorePath = Paths.get(keyStore)
+ val keyStoreType = config.getOptional[String]("search.keyStore.type").getOrElse(KeyStore.getDefaultType)
+ val keyStorePassword = config.getOptional[String]("search.keyStore.password").getOrElse("").toCharArray
+ val keyInputStream = Files.newInputStream(keyStorePath)
+ val keyManagers =
+ try {
+ val keyStore = KeyStore.getInstance(keyStoreType)
+ keyStore.load(keyInputStream, keyStorePassword)
+ val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+ kmf.init(keyStore, keyStorePassword)
+ kmf.getKeyManagers
+ } finally {
+ keyInputStream.close()
+ }
+
+ val trustManagers = config
+ .getOptional[String]("search.trustStore.path")
+ .map { trustStorePath =>
+ val keyStoreType = config.getOptional[String]("search.trustStore.type").getOrElse(KeyStore.getDefaultType)
+ val trustStorePassword = config.getOptional[String]("search.trustStore.password").getOrElse("").toCharArray
+ val trustInputStream = Files.newInputStream(Paths.get(trustStorePath))
+ try {
+ val keyStore = KeyStore.getInstance(keyStoreType)
+ keyStore.load(trustInputStream, trustStorePassword)
+ val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+ tmf.init(keyStore)
+ tmf.getTrustManagers
+ } finally {
+ trustInputStream.close()
+ }
+ }
+ .getOrElse(Array.empty)
+
+ // Configure the SSL context to use TLS
+ val sslContext = SSLContext.getInstance("TLS")
+ sslContext.init(keyManagers, trustManagers, null)
+ sslContext
+ }
+
+ def httpClientConfig: HttpClientConfigCallback = (httpClientBuilder: HttpAsyncClientBuilder) => {
+ sslContextMaybe.foreach(httpClientBuilder.setSSLContext)
+ credentialsProviderMaybe.foreach(httpClientBuilder.setDefaultCredentialsProvider)
+ httpClientBuilder
+ }
+
+ /**
+ * Underlying ElasticSearch client
+ */
+ private[database] val props = ElasticProperties(config.get[String]("search.uri"))
+ private[database] var clients = Map.empty[ExecutionContext, ElasticClient]
+ private[database] def getClient(ec: ExecutionContext): ElasticClient =
+ clients.get(ec) match {
+ case Some(c) => c
+ case None =>
+ synchronized {
+ val c = clients.getOrElse(ec, ElasticClient(JavaClient(props, requestConfigCallback, httpClientConfig)))
+ clients = clients + (ec -> c)
+ c
+ }
+ }
+ // when application close, close also ElasticSearch connection
+ lifecycle.addStopHook { () =>
+ clients.values.foreach(_.close())
+ Future.successful(())
+ }
+
+ def execute[T, U](t: T)(
+ implicit
+ handler: Handler[T, U],
+ manifest: Manifest[U],
+ ec: ExecutionContext
+ ): Future[U] = {
+ val client = getClient(ec)
+ logger.debug(s"Elasticsearch request: ${client.show(t)}")
+ client.execute(t).flatMap {
+ case RequestSuccess(_, _, _, r) => Future.successful(r)
+ case RequestFailure(_, _, _, error) =>
+ val exception = error.`type` match {
+ case "index_not_found_exception" => IndexNotFoundException
+ case "version_conflict_engine_exception" => ConflictError(error.reason, JsObject.empty)
+ case "search_phase_execution_exception" => SearchError(error.reason)
+ case _ => InternalError(s"Unknown error: $error")
+ }
+ exception match {
+ case _: ConflictError =>
+ case _ => logger.error(s"ElasticSearch request failure: ${client.show(t)}\n => $error")
+ }
+ Future.failed(exception)
+ }
+ }
+
+ /**
+ * Creates a Source (akka stream) from the result of the search
+ */
+ def source(searchRequest: SearchRequest)(implicit ec: ExecutionContext): Source[SearchHit, NotUsed] =
+ Source.fromPublisher(getClient(ec).publisher(searchRequest))
+
+ /**
+ * Create a Sink (akka stream) that creates entities in ElasticSearch
+ */
+ def sink[T](implicit builder: RequestBuilder[T], ec: ExecutionContext): Sink[T, Future[Unit]] = {
+ val sinkListener = new ResponseListener[T] {
+ override def onAck(resp: BulkResponseItem, original: T): Unit = ()
+
+ override def onFailure(resp: BulkResponseItem, original: T): Unit =
+ logger.warn(s"Document index failure ${resp.id}: ${resp.error.fold("unexpected")(_.toString)}\n$original")
+ }
+ val end = Promise[Unit]()
+ val complete = () => {
+ if (!end.isCompleted)
+ end.success(())
+ ()
+ }
+ val failure = (t: Throwable) => {
+ end.failure(t)
+ ()
+ }
+ Sink
+ .fromSubscriber(
+ getClient(ec).subscriber(
+ batchSize = 100,
+ concurrentRequests = 5,
+ refreshAfterOp = false,
+ listener = sinkListener,
+ typedListener = ResponseListener.noop,
+ completionFn = complete,
+ errorFn = failure,
+ flushInterval = None,
+ flushAfter = None,
+ failureWait = 2.seconds,
+ maxAttempts = 10
+ )
+ )
+ .mapMaterializedValue { _ =>
+ end.future
+ }
+ }
+
+ /**
+ * Name of the index, suffixed by the current version
+ */
+ val indexName: String = config.get[String]("search.index") + "_" + version
+
+ /**
+ * return a new instance of DBConfiguration that points to the previous version of the index schema
+ */
+ def previousVersion: DBConfiguration =
+ new DBConfiguration(config, lifecycle, version - 1, actorSystem)
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBCreate.scala b/elastic4play/app/org/elastic4play/database/DBCreate.scala
new file mode 100644
index 000000000..54f8bedb4
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBCreate.scala
@@ -0,0 +1,112 @@
+package org.elastic4play.database
+
+import scala.concurrent.{ExecutionContext, Future}
+import play.api.Logger
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json._
+import akka.stream.scaladsl.Sink
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.requests.common.RefreshPolicy
+import com.sksamuel.elastic4s.requests.indexes.IndexRequest
+import com.sksamuel.elastic4s.streams.RequestBuilder
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.models.BaseEntity
+
+/**
+ * Service class responsible for entity creation
+ * This service doesn't check any attribute conformity (according to model)
+ */
+@Singleton
+class DBCreate @Inject() (db: DBConfiguration) {
+
+ private[DBCreate] lazy val logger = Logger(getClass)
+
+ /**
+ * Create an entity of type "modelName" with attributes
+ *
+ * @param modelName name of the model of the creating entity
+ * @param attributes JSON object containing attributes of the creating entity. Attributes can contain _id, _parent and _routing.
+ * @return created entity attributes with _id and _routing (and _parent if entity is a child)
+ */
+ def apply(modelName: String, attributes: JsObject)(implicit ec: ExecutionContext): Future[JsObject] =
+ apply(modelName, None, attributes)
+
+ /**
+ * Create an entity of type modelName with attributes and optionally a parent
+ *
+ * @param modelName name of the model of the creating entity
+ * @param parent parent of the creating entity (if model is ChildModelDef)
+ * @param attributes JSON object containing attributes of the creating entity.
+ * Attributes can contain _id, _parent and _routing. Routing and parent information is extracted from the parent parameter (if present)
+ * @return created entity attributes with _id and _routing (and _parent if entity is a child)
+ */
+ def apply(modelName: String, parent: Option[BaseEntity], attributes: JsObject)(implicit ec: ExecutionContext): Future[JsObject] = {
+ val id = (attributes \ "_id").asOpt[String]
+ val parentId = parent
+ .map(_.id)
+ .orElse((attributes \ "_parent").asOpt[String])
+ val routing = parent
+ .map(_.routing)
+ .orElse((attributes \ "_routing").asOpt[String])
+ .orElse(id)
+
+ // remove attributes that start with "_" because we don't want to permit interference with elasticsearch internal fields
+ val docSource = addParent(modelName, parent, JsObject(attributes.fields.filterNot(_._1.startsWith("_"))))
+
+ db.execute {
+ addId(id).andThen(addRouting(routing)) {
+ indexInto(db.indexName).source(docSource.toString).refresh(RefreshPolicy.WAIT_FOR)
+ }
+ }
+ .map(indexResponse =>
+ attributes +
+ ("_type" -> JsString(modelName)) +
+ ("_id" -> JsString(indexResponse.id)) +
+ ("_parent" -> parentId.fold[JsValue](JsNull)(JsString)) +
+ ("_routing" -> JsString(routing.getOrElse(indexResponse.id))) +
+ ("_seqNo" -> JsNumber(indexResponse.seqNo)) +
+ ("_primaryTerm" -> JsNumber(indexResponse.primaryTerm))
+ )
+ }
+
+ /**
+ * add id information in index definition
+ */
+ private def addId(id: Option[String]): IndexRequest => IndexRequest = id match {
+ case Some(i) => _ id i createOnly true
+ case None => identity
+ }
+
+ /**
+ * add routing information in index definition
+ */
+ private def addRouting(routing: Option[String]): IndexRequest => IndexRequest = routing match {
+ case Some(r) => _ routing r
+ case None => identity
+ }
+
+ private def addParent(modelName: String, parent: Option[BaseEntity], entity: JsObject): JsObject = parent match {
+ case Some(p) => entity + ("relations" -> Json.obj("name" -> modelName, "parent" -> p.id))
+ case None => entity + ("relations" -> JsString(modelName))
+ }
+
+ /**
+ * Class used to build index definition based on model name and attributes
+ * This class is used by sink (ElasticSearch reactive stream)
+ */
+ private class AttributeRequestBuilder() extends RequestBuilder[JsObject] {
+ override def request(attributes: JsObject): IndexRequest = {
+ val id = (attributes \ "_id").asOpt[String]
+ val routing = (attributes \ "_routing").asOpt[String] orElse id
+ val docSource = JsObject(attributes.fields.filterNot(_._1.startsWith("_")))
+ addId(id).andThen(addRouting(routing)) {
+ indexInto(db.indexName).source(docSource.toString)
+ }
+ }
+ }
+
+ /**
+ * Build an akka stream sink that creates entities
+ */
+ def sink()(implicit ec: ExecutionContext): Sink[JsObject, Future[Unit]] = db.sink(new AttributeRequestBuilder(), ec)
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBFind.scala b/elastic4play/app/org/elastic4play/database/DBFind.scala
new file mode 100644
index 000000000..c106e2d98
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBFind.scala
@@ -0,0 +1,221 @@
+package org.elastic4play.database
+
+import akka.NotUsed
+import akka.stream.scaladsl.Source
+import akka.stream.stage.{AsyncCallback, GraphStage, GraphStageLogic, OutHandler}
+import akka.stream.{Attributes, Materializer, Outlet, SourceShape}
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.{ElasticRequest, Show}
+import com.sksamuel.elastic4s.requests.searches.{SearchHit, SearchRequest, SearchResponse}
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.{IndexNotFoundException, SearchError}
+import play.api.libs.json._
+import play.api.{Configuration, Logger}
+
+import scala.collection.mutable
+import scala.concurrent.duration.{DurationLong, FiniteDuration}
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success, Try}
+
+/**
+ * Service class responsible for entity search
+ */
+@Singleton
+class DBFind(pageSize: Int, keepAlive: FiniteDuration, db: DBConfiguration, implicit val mat: Materializer) {
+
+  @Inject def this(configuration: Configuration, db: DBConfiguration, mat: Materializer) =
+    this(configuration.get[Int]("search.pagesize"), configuration.getMillis("search.keepalive").millis, db, mat)
+
+  // scroll keep-alive rendered in ElasticSearch time-unit syntax
+  val keepAliveStr: String = s"${keepAlive.toMillis}ms"
+  private[DBFind] lazy val logger = Logger(getClass)
+
+  /**
+   * return a new instance of DBFind but using another DBConfiguration
+   */
+  def switchTo(otherDB: DBConfiguration) = new DBFind(pageSize, keepAlive, otherDB, mat)
+
+  /**
+   * Extract offset and limit from optional range
+   * Range has the following format : "start-end"
+   * If format is invalid of range is None, this function returns (0, 10)
+   */
+  private[database] def getOffsetAndLimitFromRange(range: Option[String]): (Int, Int) =
+    range match {
+      case None => (0, 10)
+      case Some("all") => (0, Int.MaxValue)
+      case Some(r) =>
+        // "-0" is appended so that a range without a "-" still splits into two parts
+        val Array(_offset, _end, _*) = (r + "-0").split("-", 3)
+        val offset = Try(Math.max(0, _offset.toInt)).getOrElse(0)
+        val end = Try(_end.toInt).getOrElse(offset + 10)
+        if (end <= offset)
+          (offset, 10)
+        else
+          (offset, end - offset)
+    }
+
+  /**
+   * Execute the search definition using scroll
+   */
+  private[database] def searchWithScroll(searchRequest: SearchRequest, offset: Int, limit: Int)(
+    implicit ec: ExecutionContext
+  ): (Source[SearchHit, NotUsed], Future[Long]) = {
+    // NOTE(review): when called from apply, searchRequest already carries .start(offset) while
+    // SearchWithScroll also skips `offset` hits itself — confirm the offset is not applied twice
+    // (ElasticSearch normally rejects `from` in a scroll context).
+    val searchWithScroll = new SearchWithScroll(db, searchRequest, keepAliveStr, offset, limit)
+    (Source.fromGraph(searchWithScroll), searchWithScroll.totalHits)
+  }
+
+  /**
+   * Execute the search definition
+   */
+  private[database] def searchWithoutScroll(searchRequest: SearchRequest, offset: Int, limit: Int)(
+    implicit ec: ExecutionContext
+  ): (Source[SearchHit, NotUsed], Future[Long]) = {
+    // the query is executed as soon as this method is called; the Source merely replays the response
+    val resp = db.execute(searchRequest.start(offset).limit(limit))
+    val total = resp.map(_.totalHits)
+    val src = Source
+      .future(resp)
+      .mapConcat { resp =>
+        resp.hits.hits.toList
+      }
+    (src, total)
+  }
+
+  // Render the request as the JSON actually sent to ElasticSearch (used for debug logging)
+  def showQuery(request: SearchRequest): String =
+    Show[ElasticRequest].show(SearchHandler.build(request))
+
+  /**
+   * Search entities in ElasticSearch
+   *
+   * @param range first and last entities to retrieve, for example "23-42" (default value is "0-10")
+   * @param sortBy define order of the entities by specifying field names used in sort. Fields can be prefixed by
+   * "-" for descendant or "+" for ascendant sort (ascendant by default).
+   * @param query a function that build a SearchRequest using the index name
+   * @return Source (akka stream) of JsObject. The source is materialized as future of long that contains the total number of entities.
+   */
+  def apply(range: Option[String], sortBy: Seq[String])(
+    query: String => SearchRequest
+  )(implicit ec: ExecutionContext): (Source[JsObject, NotUsed], Future[Long]) = {
+    val (offset, limit) = getOffsetAndLimitFromRange(range)
+    val sortDef = DBUtils.sortDefinition(sortBy)
+    val searchRequest = query(db.indexName).start(offset).sortBy(sortDef).seqNoPrimaryTerm(true)
+
+    logger.debug(
+      s"search in ${searchRequest.indexes.values.mkString(",")} ${showQuery(searchRequest)}"
+    )
+    // scroll is only worth its server-side cost when the requested window is much larger than a page
+    val (src, total) = if (limit > 2 * pageSize) {
+      searchWithScroll(searchRequest, offset, limit)
+    } else {
+      searchWithoutScroll(searchRequest, offset, limit)
+    }
+
+    (src.map(DBUtils.hit2json), total)
+  }
+
+  /**
+   * Execute the search definition
+   * This function is used to run aggregations
+   */
+  def apply(query: String => SearchRequest)(implicit ec: ExecutionContext): Future[SearchResponse] = {
+    val searchRequest = query(db.indexName)
+    logger.debug(
+      s"search in ${searchRequest.indexes.values.mkString(",")} ${showQuery(searchRequest)}"
+    )
+
+    db.execute(searchRequest)
+      .recoverWith {
+        // IndexNotFoundException is matched by equality — presumably a singleton error value
+        case t if t == IndexNotFoundException => Future.failed(t)
+        case _ => Future.failed(SearchError("Invalid search query"))
+      }
+  }
+}
+
+/**
+ * Akka stream GraphStage that emits the hits of a search executed with the ES scroll API.
+ * The first `offset` hits are discarded client-side and at most `max` hits are emitted.
+ */
+class SearchWithScroll(db: DBConfiguration, searchRequest: SearchRequest, keepAliveStr: String, offset: Int, max: Int)(
+    implicit
+    ec: ExecutionContext
+) extends GraphStage[SourceShape[SearchHit]] {
+
+  private[SearchWithScroll] lazy val logger = Logger(getClass)
+  val out: Outlet[SearchHit] = Outlet[SearchHit]("searchHits")
+  val shape: SourceShape[SearchHit] = SourceShape.of(out)
+  // The first page is requested eagerly, at stage construction, so totalHits can be exposed
+  // before the stream is materialized
+  val firstResults: Future[SearchResponse] = db.execute(searchRequest.scroll(keepAliveStr))
+  val totalHits: Future[Long] = firstResults.map(_.totalHits)
+
+  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
+    var processed: Long = 0 // number of hits already pushed downstream
+    var skip: Long = offset // number of leading hits still to discard
+    val queue: mutable.Queue[SearchHit] = mutable.Queue.empty
+    var scrollId: Future[String] = firstResults.map(_.scrollId.get)
+    var firstResultProcessed = false
+
+    setHandler(
+      out,
+      new OutHandler {
+
+        // Push one buffered hit and complete the stage once `max` hits have been emitted
+        def pushNextHit(): Unit = {
+          push(out, queue.dequeue())
+          processed += 1
+          if (processed >= max) {
+            completeStage()
+          }
+        }
+
+        val firstCallback: AsyncCallback[Try[SearchResponse]] = getAsyncCallback[Try[SearchResponse]] {
+          case Success(searchResponse) if skip > 0 =>
+            if (searchResponse.hits.size <= skip)
+              skip -= searchResponse.hits.size
+            else {
+              queue ++= searchResponse.hits.hits.drop(skip.toInt)
+              skip = 0
+            }
+            firstResultProcessed = true
+            onPull()
+          case Success(searchResponse) =>
+            queue ++= searchResponse.hits.hits
+            firstResultProcessed = true
+            onPull()
+          case Failure(error) =>
+            logger.warn("Search error", error)
+            failStage(error)
+        }
+
+        override def onPull(): Unit =
+          if (firstResultProcessed) {
+            // Once the requested number of hits has been emitted, stop here: the original code
+            // fell through after completeStage() and could still issue a useless scroll request.
+            if (processed >= max) completeStage()
+            else if (queue.isEmpty) {
+              val callback = getAsyncCallback[Try[SearchResponse]] {
+                case Success(searchResponse) if searchResponse.isTimedOut =>
+                  logger.warn("Search timeout")
+                  failStage(SearchError("Request terminated early or timed out"))
+                case Success(searchResponse) if searchResponse.isEmpty =>
+                  completeStage()
+                case Success(searchResponse) if skip > 0 =>
+                  if (searchResponse.hits.size <= skip) {
+                    skip -= searchResponse.hits.size
+                    onPull()
+                  } else {
+                    queue ++= searchResponse.hits.hits.drop(skip.toInt)
+                    skip = 0
+                    pushNextHit()
+                  }
+                case Success(searchResponse) =>
+                  queue ++= searchResponse.hits.hits
+                  pushNextHit()
+                case Failure(error) =>
+                  logger.warn("Search error", error)
+                  failStage(SearchError("Request terminated early or timed out"))
+              }
+              val futureSearchResponse = scrollId.flatMap(s => db.execute(searchScroll(s).keepAlive(keepAliveStr)))
+              scrollId = futureSearchResponse.map(_.scrollId.get)
+              futureSearchResponse.onComplete(callback.invoke)
+            } else {
+              pushNextHit()
+            }
+          } else firstResults.onComplete(firstCallback.invoke)
+      }
+    )
+    // Release the server-side scroll context when the stage stops
+    override def postStop(): Unit =
+      scrollId.foreach { s =>
+        db.execute(clearScroll(s))
+      }
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBGet.scala b/elastic4play/app/org/elastic4play/database/DBGet.scala
new file mode 100644
index 000000000..b73e97cb7
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBGet.scala
@@ -0,0 +1,37 @@
+package org.elastic4play.database
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.NotFoundError
+import play.api.libs.json.JsObject
+
+import scala.concurrent.{ExecutionContext, Future}
+
+@Singleton
+class DBGet @Inject() (db: DBConfiguration) {
+
+  /**
+    * Retrieve one entity from ElasticSearch.
+    *
+    * @param modelName the name of the model (ie. document type), used only in the error message
+    * @param id identifier of the entity to retrieve
+    * @return the entity as a JsObject, or a failed future with NotFoundError
+    */
+  def apply(modelName: String, id: String)(implicit ec: ExecutionContext): Future[JsObject] = {
+    // Search by id is not possible on child entity without routing information => id query
+    val request = search(db.indexName)
+      .query(idsQuery(id) /*.types(modelName)*/ )
+      .size(1)
+      .seqNoPrimaryTerm(true)
+    db.execute(request).map { searchResponse =>
+      searchResponse.hits.hits.headOption match {
+        case Some(hit) => DBUtils.hit2json(hit)
+        case None      => throw NotFoundError(s"$modelName $id not found")
+      }
+    }
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBIndex.scala b/elastic4play/app/org/elastic4play/database/DBIndex.scala
new file mode 100644
index 000000000..70ef9a441
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBIndex.scala
@@ -0,0 +1,158 @@
+package org.elastic4play.database
+
+import scala.concurrent.{blocking, ExecutionContext, Future}
+import play.api.{Configuration, Logger}
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.requests.indexes.CreateIndexRequest
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.InternalError
+import org.elastic4play.models.{ChildModelDef, ModelAttributes}
+import org.elastic4play.utils.Collection
+
+@Singleton
+class DBIndex(db: DBConfiguration, nbShards: Int, nbReplicas: Int, settings: Map[String, Any]) {
+
+  // Injected constructor: shard/replica counts and extra index settings are read from
+  // configuration, with 5 shards / 1 replica as defaults
+  @Inject def this(configuration: Configuration, db: DBConfiguration) =
+    this(
+      db,
+      configuration.getOptional[Int]("search.nbshards").getOrElse(5),
+      configuration.getOptional[Int]("search.nbreplicas").getOrElse(1),
+      configuration
+        .getOptional[Configuration]("search.settings")
+        .fold(Map.empty[String, Any]) { settings =>
+          settings
+            .entrySet
+            .map { case (k, v) => k -> v.unwrapped()}
+            .toMap
+        }
+    )
+
+  private[DBIndex] lazy val logger = Logger(getClass)
+
+  /** Create a new index. Collect mapping for all attributes of all entities
+    *
+    * @param models list of all ModelAttributes to used in order to build index mapping
+    * @return a future which is completed when index creation is finished
+    */
+  def createIndex(models: Iterable[ModelAttributes])(implicit ec: ExecutionContext): Future[Unit] = {
+    // attributes shared by several models are merged: templates by name, fields by attribute name
+    val mappingTemplates = models.flatMap(_.attributes).flatMap(_.elasticTemplate()).toSeq.distinctBy(_.name)
+    val fields = models.flatMap(_.attributes.filterNot(_.attributeName == "_id")).toSeq.distinctBy(_.attributeName).map(_.elasticMapping)
+    // Build the "relations" join field: each child model is registered under its parent model;
+    // models without children get a "dummy-<name>" child — NOTE(review): apparently needed so
+    // every model name is declared as a parent in the join field, confirm.
+    val relationsField = models
+      .map {
+        case child: ChildModelDef[_, _, _, _] => child.parentModel.modelName -> Seq(child.modelName)
+        case model => model.modelName -> Seq(s"dummy-${model.modelName}")
+      }
+      .groupBy(_._1)
+      .foldLeft(joinField("relations")) {
+        case (join, (parent, child)) => join.relation(parent, child.flatMap(_._2).toSeq)
+      }
+
+    for {
+      majorVersion <- nodeMajorVersion
+      modelMapping = properties(fields :+ relationsField)
+        .dateDetection(false)
+        .numericDetection(false)
+        .templates(mappingTemplates)
+      createIndexRequest = CreateIndexRequest(db.indexName)
+        .mapping(modelMapping)
+        .shards(nbShards)
+        .replicas(nbReplicas)
+      // ElasticSearch 5 needs an explicit opt-in for single-type indices
+      createIndexRequestWithSettings = majorVersion match {
+        case 5 => createIndexRequest.indexSetting("mapping.single_type", true)
+        case _ => createIndexRequest
+      }
+      _ <- db.execute {
+        settings.foldLeft(createIndexRequestWithSettings) {
+          case (cid, (key, value)) => cid.indexSetting(key, value)
+        }
+      }
+    } yield ()
+  }
+
+  /** Tests whether the index exists
+    *
+    * @return future of true if the index exists
+    */
+  def getIndexStatus(implicit ec: ExecutionContext): Future[Boolean] =
+    db.execute {
+      indexExists(db.indexName)
+    }
+      .map {
+        _.isExists
+      }
+
+  /** Tests whether the index exists
+    *
+    * @return true if the index exists
+    */
+  // blocking variant of getIndexStatus, for call sites that need a synchronous answer
+  def indexStatus(implicit ec: ExecutionContext): Boolean = blocking {
+    getIndexStatus.await
+  }
+
+  /** Get the number of document of this type
+    *
+    * @param modelName name of the document type from which the count must be done
+    * @return document count
+    */
+  def getSize(modelName: String)(implicit ec: ExecutionContext): Future[Long] =
+    db.execute {
+      search(db.indexName).matchQuery("relations", modelName).size(0)
+    }
+      .map {
+        _.totalHits
+      }
+      // a missing index (or failed count) is reported as 0 rather than an error
+      .recover { case _ => 0L }
+
+  /** Get cluster status:
+    * 0: green
+    * 1: yellow
+    * 2: red
+    *
+    * @return cluster status
+    */
+  def getClusterStatus(implicit ec: ExecutionContext): Future[Int] =
+    db.execute {
+      clusterHealth(db.indexName)
+    }
+      .map {
+        _.status match {
+          case "green" => 0
+          case "yellow" => 1
+          case "red" => 2
+          case status =>
+            logger.error(s"unknown cluster status: $status")
+            2
+        }
+      }
+      // an unreachable cluster is reported with the worst status
+      .recover { case _ => 2 }
+
+  // distinct versions of all nodes in the cluster
+  def nodeVersions(implicit ec: ExecutionContext): Future[Seq[String]] =
+    db.execute {
+      nodeInfo()
+    }
+      .map(_.nodes.values.map(_.version).toSeq.distinct)
+
+  // major version shared by all nodes; fails when the cluster mixes major versions
+  def nodeMajorVersion(implicit ec: ExecutionContext): Future[Int] =
+    nodeVersions.flatMap { v =>
+      val majorVersions = v.map(_.takeWhile(_ != '.')).distinct.map(_.toInt)
+      if (majorVersions.size == 1)
+        Future.successful(majorVersions.head)
+      else
+        Future.failed(InternalError(s"The ElasticSearch cluster contains node with different major versions ($v)"))
+    }
+
+  // blocking variant of getClusterStatus
+  def clusterStatus(implicit ec: ExecutionContext): Int = blocking {
+    getClusterStatus.await
+  }
+
+  def getClusterStatusName(implicit ec: ExecutionContext): Future[String] = getClusterStatus.map {
+    case 0 => "OK"
+    case 1 => "WARNING"
+    case 2 => "ERROR"
+    case _ => "UNKNOWN"
+  }
+
+  // blocking variant of getClusterStatusName
+  def clusterStatusName(implicit ec: ExecutionContext): String = blocking {
+    getClusterStatusName.await
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBModify.scala b/elastic4play/app/org/elastic4play/database/DBModify.scala
new file mode 100644
index 000000000..efc353bc4
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBModify.scala
@@ -0,0 +1,104 @@
+package org.elastic4play.database
+
+import java.util.{Map => JMap}
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.JacksonSupport
+import com.sksamuel.elastic4s.requests.common.RefreshPolicy
+import com.sksamuel.elastic4s.requests.script.Script
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.models.BaseEntity
+import play.api.Logger
+import play.api.libs.json._
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.jdk.CollectionConverters._
+
+// Parameters applied to update requests:
+//  - retryOnConflict: number of retries when a version conflict occurs
+//  - refreshPolicy: whether ES waits for the index refresh before answering
+//  - seqNoAndPrimaryTerm: optional optimistic-concurrency check (ifSeqNo / ifPrimaryTerm)
+case class ModifyConfig(
+    retryOnConflict: Int = 5,
+    refreshPolicy: RefreshPolicy = RefreshPolicy.WAIT_FOR,
+    seqNoAndPrimaryTerm: Option[(Long, Long)] = None
+)
+
+object ModifyConfig {
+  def default: ModifyConfig = ModifyConfig(5, RefreshPolicy.WAIT_FOR, None)
+}
+
+@Singleton
+class DBModify @Inject() (db: DBConfiguration) {
+  private[DBModify] lazy val logger = Logger(getClass)
+
+  /**
+   * Convert JSON value to java native value, usable as painless script parameter
+   */
+  private[database] def jsonToAny(json: JsValue): Any =
+    json match {
+      case v: JsObject => v.fields.map { case (k, value) => k -> jsonToAny(value) }.toMap.asJava
+      case v: JsArray  => v.value.map(jsonToAny).toArray
+      case v: JsNumber =>
+        // Preserve the numeric nature: integral values become Long, anything else Double
+        // (a plain .toLong silently truncated decimal values, e.g. 1.5 -> 1)
+        if (v.value.isValidLong) v.value.toLongExact else v.value.toDouble
+      case v: JsString  => v.value
+      case v: JsBoolean => v.value
+      case JsNull       => null
+    }
+
+  /**
+   * Build the parameters needed to update ElasticSearch document
+   * Parameters contains update script, parameters for the script
+   * As null is a valid value to set, in order to remove an attribute an empty array must be used.
+   *
+   * @param entity entity to update
+   * @param updateAttributes contains attributes to update. JSON object contains key (attribute name) and value.
+   * Sub attribute can be updated using dot notation ("attr.subattribute").
+   * @return ElasticSearch update script
+   */
+  private[database] def buildScript(entity: BaseEntity, updateAttributes: JsObject): Script = {
+    val attrs = updateAttributes.fields.zipWithIndex
+    val updateScript = attrs.map {
+      case ((name, JsArray(Seq())), _) =>
+        // an empty array means "remove the attribute"
+        val names = name.split("\\.")
+        names.init.map(n => s"""["$n"]""").mkString("ctx._source", "", s""".remove("${names.last}")""")
+      case ((name, JsNull), _) =>
+        name.split("\\.").map(n => s"""["$n"]""").mkString("ctx._source", "", "=null")
+      case ((name, _), index) =>
+        name.split("\\.").map(n => s"""["$n"]""").mkString("ctx._source", "", s"=params.param$index")
+    } mkString ";"
+
+    // removals and nulls need no script parameter; remaining values are passed as paramN
+    val parameters = jsonToAny(JsObject(attrs.collect {
+      case ((_, value), index) if value != JsArray(Nil) && value != JsNull => s"param$index" -> value
+    })).asInstanceOf[JMap[String, Any]].asScala.toMap
+
+    Script(updateScript).params(parameters)
+  }
+
+  /**
+   * Update entity with new attributes contained in JSON object
+   *
+   * @param entity entity to update
+   * @param updateAttributes contains attributes to update. JSON object contains key (attribute name) and value.
+   * Sub attribute can be updated using dot notation ("attr.subattribute").
+   * @param modifyConfig modification parameter (retryOnConflict and refresh policy)
+   * @return new version of the entity
+   */
+  def apply(entity: BaseEntity, updateAttributes: JsObject, modifyConfig: ModifyConfig)(implicit ec: ExecutionContext): Future[BaseEntity] =
+    db.execute {
+      val updateDefinition = updateById(db.indexName, entity.id)
+        .routing(entity.routing)
+        .script(buildScript(entity, updateAttributes))
+        .fetchSource(true)
+        .retryOnConflict(modifyConfig.retryOnConflict)
+        .refresh(modifyConfig.refreshPolicy)
+      // optional optimistic-concurrency check
+      modifyConfig.seqNoAndPrimaryTerm.fold(updateDefinition)(s => updateDefinition.ifSeqNo(s._1).ifPrimaryTerm(s._2))
+    }
+      .map { updateResponse =>
+        // rebuild the entity from the returned source, re-attaching the metadata attributes
+        entity.model(
+          Json.parse(JacksonSupport.mapper.writeValueAsString(updateResponse.source)).as[JsObject] +
+            ("_type" -> JsString(entity.model.modelName)) +
+            ("_id" -> JsString(entity.id)) +
+            ("_routing" -> JsString(entity.routing)) +
+            ("_parent" -> entity.parentId.fold[JsValue](JsNull)(JsString)) +
+            ("_seqNo" -> JsNumber(updateResponse.seqNo)) +
+            ("_primaryTerm" -> JsNumber(updateResponse.primaryTerm))
+        )
+      }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBRemove.scala b/elastic4play/app/org/elastic4play/database/DBRemove.scala
new file mode 100644
index 000000000..86372f2d3
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBRemove.scala
@@ -0,0 +1,26 @@
+package org.elastic4play.database
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.requests.common.RefreshPolicy
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.models.BaseEntity
+import play.api.Logger
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Success
+
+@Singleton
+class DBRemove @Inject() (db: DBConfiguration) {
+
+  lazy val logger: Logger = Logger(getClass)
+
+  /**
+    * Delete an entity from ElasticSearch.
+    *
+    * @return true when the deletion succeeded, false otherwise; the future never fails
+    */
+  def apply(entity: BaseEntity)(implicit ec: ExecutionContext): Future[Boolean] = {
+    logger.debug(s"Remove ${entity.model.modelName} ${entity.id}")
+    val deletion = deleteById(db.indexName, entity.id)
+      .routing(entity.routing)
+      .refresh(RefreshPolicy.WAIT_FOR)
+    db.execute(deletion).transform(r => Success(r.isSuccess))
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBSequence.scala b/elastic4play/app/org/elastic4play/database/DBSequence.scala
new file mode 100644
index 000000000..20f9b3d90
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBSequence.scala
@@ -0,0 +1,28 @@
+package org.elastic4play.database
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.requests.common.RefreshPolicy
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.models.{Attribute, ModelAttributes, AttributeFormat => F, AttributeOption => O}
+
+import scala.concurrent.{ExecutionContext, Future}
+
+// Internal model backing named sequences; each sequence document holds a single counter
+class SequenceModel extends ModelAttributes("sequence") {
+  val counter: Attribute[Long] = attribute("sequenceCounter", F.numberFmt, "Value of the sequence", O.model)
+}
+
+@Singleton
+class DBSequence @Inject() (db: DBConfiguration) {
+
+  /**
+   * Atomically increment and return the sequence identified by seqId.
+   * The sequence document is created with a counter of 1 (upsert) on first use.
+   */
+  def apply(seqId: String)(implicit ec: ExecutionContext): Future[Int] =
+    db.execute {
+      updateById(db.indexName, s"sequence_$seqId")
+        .upsert("sequenceCounter" -> 1, "relations" -> "sequence")
+        .script("ctx._source.sequenceCounter += 1")
+        .retryOnConflict(5)
+        .fetchSource(true)
+        .refresh(RefreshPolicy.WAIT_FOR)
+    } map { updateResponse =>
+      // the JSON number may deserialize as Integer or Long depending on its magnitude;
+      // a blind asInstanceOf[Int] would throw on Long values
+      updateResponse.source("sequenceCounter") match {
+        case n: Number => n.intValue()
+        case other     => sys.error(s"Invalid sequence counter value: $other")
+      }
+    }
+}
diff --git a/elastic4play/app/org/elastic4play/database/DBUtils.scala b/elastic4play/app/org/elastic4play/database/DBUtils.scala
new file mode 100644
index 000000000..440e926f0
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/database/DBUtils.scala
@@ -0,0 +1,55 @@
+package org.elastic4play.database
+
+import play.api.libs.json._
+import com.sksamuel.elastic4s.ElasticDsl.fieldSort
+import com.sksamuel.elastic4s.requests.searches.SearchHit
+import com.sksamuel.elastic4s.requests.searches.sort.{Sort, SortOrder}
+import org.elastic4play.utils
+
+object DBUtils {
+
+  /**
+   * Build the sort definitions from field names, optionally prefixed with "+" (ascending)
+   * or "-" (descending, ascending being the default). Empty field names are ignored and
+   * duplicated fields keep their first occurrence; a final "_doc" sort makes the global
+   * order fully deterministic.
+   */
+  def sortDefinition(sortBy: Seq[String]): Seq[Sort] = {
+    sortBy
+      // collect (instead of map) drops empty field names rather than throwing a MatchError
+      .collect {
+        case f if f.startsWith("+") => f.drop(1) -> fieldSort(f.drop(1)).order(SortOrder.ASC)
+        case f if f.startsWith("-") => f.drop(1) -> fieldSort(f.drop(1)).order(SortOrder.DESC)
+        case f if f.nonEmpty => f -> fieldSort(f)
+      }
+      // then remove duplicates
+      // Same as : val fieldSortDefs = byFieldList.groupBy(_._1).map(_._2.head).values.toSeq
+      .distinctBy(_._1)
+      .map(_._2) :+ fieldSort("_doc").order(SortOrder.DESC)
+  }
+
+  // Convert a value coming from SearchHit.sourceAsMap into JSON
+  private def toJson(any: Any): JsValue =
+    any match {
+      case m: Map[_, _] => JsObject(m.toSeq.map { case (k, v) => k.toString -> toJson(v) })
+      case s: String => JsString(s)
+      case l: Long => JsNumber(l)
+      case i: Int => JsNumber(i)
+      case d: Double => JsNumber(d)
+      case f: Float => JsNumber(f)
+      // high-precision / very large numbers may be deserialized as BigDecimal
+      case b: BigDecimal => JsNumber(b)
+      case b: Boolean => JsBoolean(b)
+      case null => JsNull
+      case s: Seq[_] => JsArray(s.map(toJson))
+    }
+
+  /** Transform search hit into JsObject
+    * This function parses hit source add _type, _routing, _parent, _id, _seqNo and _primaryTerm attributes
+    */
+  def hit2json(hit: SearchHit): JsObject = {
+    val id = JsString(hit.id)
+    val body = toJson(hit.sourceAsMap).as[JsObject]
+    // the join field "relations" is either an object {name, parent} (child) or a plain string (root)
+    val (parent, model) = (body \ "relations" \ "parent").asOpt[JsString] match {
+      case Some(p) => p -> (body \ "relations" \ "name").as[JsString]
+      case None => JsNull -> (body \ "relations").as[JsString]
+    }
+    body - "relations" +
+      ("_type" -> model) +
+      // routing defaults to the document id when ES returns none
+      ("_routing" -> hit.routing.fold(id)(JsString.apply)) +
+      ("_parent" -> parent) +
+      ("_id" -> id) +
+      ("_seqNo" -> JsNumber(hit.seqNo)) +
+      ("_primaryTerm" -> JsNumber(hit.primaryTerm))
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/models/AttachmentAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/AttachmentAttributeFormat.scala
new file mode 100644
index 000000000..ba0d22c42
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/AttachmentAttributeFormat.scala
@@ -0,0 +1,71 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.{keywordField, longField, nestedField}
+import com.sksamuel.elastic4s.fields.NestedField
+import org.elastic4play.controllers.JsonFormat._
+import org.elastic4play.controllers.{AttachmentInputValue, FileInputValue, InputValue, JsonInputValue}
+import org.elastic4play.services.JsonFormat.attachmentFormat
+import org.elastic4play.services.{Attachment, DBLists}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.Logger
+import play.api.libs.json.{JsValue, Json}
+
+object AttachmentAttributeFormat extends AttributeFormat[Attachment]("attachment") {
+  private[AttachmentAttributeFormat] lazy val logger = Logger(getClass)
+
+  // A JSON value is valid if it reads as either a FileInputValue or an Attachment
+  override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = {
+    lazy val validJson = fileInputValueFormat.reads(value).asOpt orElse jsFormat.reads(value).asOpt
+    val result =
+      if (subNames.isEmpty && validJson.isDefined)
+        Good(value)
+      else
+        formatError(JsonInputValue(value))
+    logger.debug(s"checkJson($subNames, $value) ⇒ $result")
+    result
+  }
+
+  // characters rejected in uploaded file names (path separators, control and shell-special characters)
+  val forbiddenChar = Seq('/', '\n', '\r', '\t', '\u0000', '\f', '`', '?', '*', '\\', '<', '>', '|', '\"', ':', ';')
+
+  override def inputValueToJson(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] = {
+    val result =
+      if (subNames.nonEmpty)
+        formatError(value)
+      else
+        value match {
+          // file names containing a forbidden character are rejected
+          case fiv: FileInputValue if fiv.name.intersect(forbiddenChar).isEmpty => Good(Json.toJson(fiv)(fileInputValueFormat))
+          case aiv: AttachmentInputValue => Good(Json.toJson(aiv.toAttachment)(jsFormat))
+          case JsonInputValue(json) if attachmentInputValueReads.reads(json).isSuccess => Good(json)
+          case _ => formatError(value)
+        }
+    logger.debug(s"inputValueToJson($subNames, $value) ⇒ $result")
+    result
+  }
+
+  override def fromInputValue(subNames: Seq[String], value: InputValue): Attachment Or Every[AttributeError] = {
+    val result = value match {
+      case JsonInputValue(json) if subNames.isEmpty =>
+        attachmentInputValueReads.reads(json).map(aiv => Good(aiv.toAttachment)).getOrElse(formatError(value))
+      case _ => formatError(value)
+    }
+    logger.debug(s"fromInputValue($subNames, $value) ⇒ $result")
+    result
+  }
+
+  override def elasticType(attributeName: String): NestedField =
+    nestedField(attributeName).fields(
+      keywordField("name"),
+      // NOTE(review): the mapping declares "hashes" while definition() below exposes ".hash" —
+      // confirm this mismatch is intentional
+      keywordField("hashes"),
+      longField("size"),
+      keywordField("contentType"),
+      keywordField("id")
+    )
+
+  override def definition(dblists: DBLists, attribute: Attribute[Attachment]): Seq[AttributeDefinition] =
+    Seq(
+      AttributeDefinition(s"${attribute.attributeName}.name", "string", s"file name of ${attribute.description}", Nil, Nil),
+      AttributeDefinition(s"${attribute.attributeName}.hash", "hash", s"hash of ${attribute.description}", Nil, Nil),
+      AttributeDefinition(s"${attribute.attributeName}.size", "number", s"file size of ${attribute.description}", Nil, Nil),
+      AttributeDefinition(s"${attribute.attributeName}.contentType", "string", s"content type of ${attribute.description}", Nil, Nil)
+    )
+}
diff --git a/elastic4play/app/org/elastic4play/models/Attributes.scala b/elastic4play/app/org/elastic4play/models/Attributes.scala
new file mode 100644
index 000000000..72ed156ab
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/Attributes.scala
@@ -0,0 +1,144 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.fields.{ElasticField, TextField}
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.controllers.InputValue
+import org.elastic4play.services.{Attachment, DBLists}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError, MissingAttributeError, UpdateReadOnlyAttributeError}
+import org.scalactic._
+import play.api.Logger
+import play.api.libs.json.{Format, JsArray, JsNull, JsValue}
+
+// Metadata describing an attribute: name, type name, human description, allowed values and labels
+case class AttributeDefinition(name: String, `type`: String, description: String, values: Seq[JsValue], labels: Seq[String])
+
+abstract class AttributeFormat[T](val name: String)(implicit val jsFormat: Format[T]) {
+
+  /** Validate a JSON value for this format; subNames address sub-attributes (empty for the attribute itself). */
+  def checkJson(subNames: Seq[String], value: JsValue): JsValue Or Every[AttributeError]
+
+  // creation and update validations default to the generic check
+  def checkJsonForCreation(subNames: Seq[String], value: JsValue): JsValue Or Every[AttributeError] =
+    checkJson(subNames, value)
+
+  def checkJsonForUpdate(subNames: Seq[String], value: JsValue): JsValue Or Every[AttributeError] =
+    checkJson(subNames, value)
+
+  // parse an input value then render it back to JSON
+  def inputValueToJson(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] =
+    fromInputValue(subNames, value).map(v => jsFormat.writes(v))
+
+  /** Parse an input value into the format's native type. */
+  def fromInputValue(subNames: Seq[String], value: InputValue): T Or Every[AttributeError]
+
+  /** ElasticSearch mapping for an attribute of this format. */
+  def elasticType(attributeName: String): ElasticField
+
+  // formats may contribute dynamic mapping templates (none by default)
+  def elasticTemplate(attributePath: Seq[String]): Seq[DynamicTemplateRequest] = Nil
+
+  protected def formatError(value: InputValue): Bad[One[InvalidFormatAttributeError]] = Bad(One(InvalidFormatAttributeError("", name, value)))
+
+  // description exposed through AttributeDefinition; a single entry by default
+  def definition(dblists: DBLists, attribute: Attribute[T]): Seq[AttributeDefinition] =
+    Seq(AttributeDefinition(attribute.attributeName, name, attribute.description, Nil, Nil))
+}
+
+// Registry of the built-in attribute formats, referenced by model definitions
+object AttributeFormat {
+  val dateFmt: DateAttributeFormat = DateAttributeFormat
+  val textFmt: TextAttributeFormat = TextAttributeFormat
+  val stringFmt: StringAttributeFormat = StringAttributeFormat
+  val userFmt: UserAttributeFormat = UserAttributeFormat
+  val booleanFmt: BooleanAttributeFormat = BooleanAttributeFormat
+  val numberFmt: NumberAttributeFormat = NumberAttributeFormat
+  val attachmentFmt: AttributeFormat[Attachment] = AttachmentAttributeFormat
+  val metricsFmt: MetricsAttributeFormat = MetricsAttributeFormat
+  val customFields: CustomAttributeFormat = CustomAttributeFormat
+  val uuidFmt: UUIDAttributeFormat = UUIDAttributeFormat
+  val hashFmt: AttributeFormat[String] = HashAttributeFormat
+  val binaryFmt: BinaryAttributeFormat = BinaryAttributeFormat
+  val rawFmt: RawAttributeFormat = RawAttributeFormat
+
+  def enumFmt[T <: Enumeration](e: T)(implicit format: Format[T#Value]): EnumerationAttributeFormat[T] = EnumerationAttributeFormat[T](e)
+
+  def listEnumFmt(enumerationName: String)(dblists: DBLists): ListEnumerationAttributeFormat =
+    ListEnumerationAttributeFormat(enumerationName)(dblists)
+
+  def objectFmt(subAttributes: Seq[Attribute[_]]): ObjectAttributeFormat = ObjectAttributeFormat(subAttributes)
+}
+
+// Flags altering attribute validation and exposure:
+//  - readonly: rejected on update (see Attribute.validateForUpdate)
+//  - model / form: restrict where the attribute appears (Attribute.isModel / isForm)
+//  - sensitive / readonly imply unaudited (see Attribute.isUnaudited)
+object AttributeOption extends Enumeration with HiveEnumeration {
+  type Type = Value
+  val readonly, unaudited, model, form, sensitive = Value
+}
+
+case class Attribute[T](
+    modelName: String,
+    attributeName: String,
+    format: AttributeFormat[T],
+    options: Seq[AttributeOption.Type],
+    defaultValue: Option[() => T],
+    description: String
+) {
+
+  private[Attribute] lazy val logger = Logger(getClass)
+
+  // default value rendered as JSON, if a default is defined
+  def defaultValueJson: Option[JsValue] = defaultValue.map(d => format.jsFormat.writes(d()))
+
+  lazy val isMulti: Boolean = format match {
+    case _: MultiAttributeFormat[_] => true
+    case _ => false
+  }
+  // "model"/"form" options restrict where the attribute appears
+  lazy val isForm: Boolean = !options.contains(AttributeOption.model)
+  lazy val isModel: Boolean = !options.contains(AttributeOption.form)
+  lazy val isReadonly: Boolean = options.contains(AttributeOption.readonly)
+  // sensitive and readonly attributes are implicitly unaudited
+  lazy val isUnaudited: Boolean = options.contains(AttributeOption.unaudited) || isSensitive || isReadonly
+  lazy val isSensitive: Boolean = options.contains(AttributeOption.sensitive)
+  // optional and multivalued attributes may be absent
+  lazy val isRequired: Boolean = format match {
+    case _: OptionalAttributeFormat[_] => false
+    case _: MultiAttributeFormat[_] => false
+    case _ => true
+  }
+
+  def elasticMapping: ElasticField = format.elasticType(attributeName) match {
+    // NOTE(review): elastic4s TextField's `type` is "text", never "String", so this case
+    // apparently never matches and sensitive fields stay indexed — confirm whether the
+    // comparison should target "text" ("string" was the pre-ES5 type name).
+    case a: TextField if isSensitive && a.`type` == "String" => a.index(false)
+    case a => a
+  }
+
+  // dynamic templates contributed by the format, addressed by the full attribute path
+  def elasticTemplate(attributePath: Seq[String] = Nil): Seq[DynamicTemplateRequest] =
+    format.elasticTemplate(attributePath :+ attributeName)
+
+  /**
+   * Validate a value used to create an entity: a missing or empty value is accepted only when
+   * the attribute is not required or has a default; otherwise the format validates the JSON.
+   */
+  def validateForCreation(value: Option[JsValue]): Option[JsValue] Or Every[AttributeError] = {
+    val result = value match {
+      case Some(JsNull) if !isRequired => Good(value)
+      case Some(JsArray(Seq())) if !isRequired => Good(value)
+      case None if !isRequired => Good(value)
+      case Some(JsNull) | Some(JsArray(Seq())) | None =>
+        if (defaultValueJson.isDefined)
+          Good(defaultValueJson)
+        else
+          Bad(One(MissingAttributeError(attributeName)))
+      case Some(v) =>
+        format
+          .checkJsonForCreation(Nil, v)
+          .transform(
+            g => Good(Some(g)),
+            x =>
+              // rewrite format errors so they carry this attribute's name
+              Bad(x.map {
+                case ifae: InvalidFormatAttributeError => ifae.copy(name = attributeName)
+                case other => other
+              })
+          )
+    }
+    logger.debug(s"$modelName.$attributeName(${format.name}).validateForCreation($value) ⇒ $result")
+    result
+  }
+
+  /**
+   * Validate a value used to update an entity: readonly attributes are rejected and emptying
+   * a required attribute is forbidden; otherwise the format validates the JSON.
+   */
+  def validateForUpdate(subNames: Seq[String], value: JsValue): JsValue Or Every[AttributeError] = {
+    val result = value match {
+      case _ if isReadonly => Bad(One(UpdateReadOnlyAttributeError(attributeName)))
+      case JsNull | JsArray(Seq()) if isRequired => Bad(One(MissingAttributeError(attributeName)))
+      case JsNull | JsArray(Seq()) => Good(value)
+      case v =>
+        format
+          .checkJsonForUpdate(subNames, v)
+          .badMap(_.map {
+            case ifae: InvalidFormatAttributeError => ifae.copy(name = attributeName)
+            case other => other
+          })
+    }
+    logger.debug(s"$modelName.$attributeName(${format.name}).validateForUpdate($value) ⇒ $result")
+    result
+  }
+}
diff --git a/elastic4play/app/org/elastic4play/models/BinaryAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/BinaryAttributeFormat.scala
new file mode 100644
index 000000000..e39741771
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/BinaryAttributeFormat.scala
@@ -0,0 +1,22 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.binaryField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.models.JsonFormat.binaryFormats
+import org.elastic4play.services.DBLists
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.JsValue
+
+class BinaryAttributeFormat extends AttributeFormat[Array[Byte]]("binary")(binaryFormats) {
+ override def checkJson(subNames: Seq[String], value: JsValue): Bad[One[InvalidFormatAttributeError]] = formatError(JsonInputValue(value))
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Array[Byte] Or Every[AttributeError] = formatError(value)
+
+ override def elasticType(attributeName: String): ElasticField = binaryField(attributeName)
+
+ override def definition(dblists: DBLists, attribute: Attribute[Array[Byte]]): Seq[AttributeDefinition] = Nil
+}
+
+object BinaryAttributeFormat extends BinaryAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/BooleanAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/BooleanAttributeFormat.scala
new file mode 100644
index 000000000..62903745c
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/BooleanAttributeFormat.scala
@@ -0,0 +1,33 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.booleanField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsBoolean, JsValue}
+
+class BooleanAttributeFormat extends AttributeFormat[Boolean]("boolean") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case _: JsBoolean if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Boolean Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) =>
+ try Good(v.toBoolean)
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case JsonInputValue(JsBoolean(v)) => Good(v)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = booleanField(attributeName)
+}
+
+object BooleanAttributeFormat extends BooleanAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/CustomAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/CustomAttributeFormat.scala
new file mode 100644
index 000000000..f2e00d8d1
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/CustomAttributeFormat.scala
@@ -0,0 +1,93 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.fields.ElasticField
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.AttributeError
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.services.DBLists
+import org.scalactic._
+import play.api.Logger
+import play.api.libs.json._
+
+class CustomAttributeFormat extends AttributeFormat[JsValue]("custom") {
+ private[CustomAttributeFormat] lazy val logger = Logger(getClass)
+
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, Every[AttributeError]] = fromInputValue(subNames, JsonInputValue(value))
+
+ override def checkJsonForCreation(subNames: Seq[String], value: JsValue): Or[JsValue, Every[AttributeError]] = {
+ val result =
+ if (subNames.isEmpty && objectIsValid(value)) Good(value)
+ else formatError(JsonInputValue(value))
+ logger.debug(s"checkJsonForCreation($subNames, $value) ⇒ $result")
+ result
+ }
+
+ private def objectIsValid(v: JsValue) = v match {
+ case JsObject(fields) => fields.values.forall(objectFieldsIsValid)
+ case _ => false
+ }
+
+ private def objectFieldsIsValid(v: JsValue) = v match {
+ case JsObject(fields) => fields.forall(fieldIsValid)
+ case _ => false
+ }
+
+ private def fieldIsValid(f: (String, JsValue)): Boolean = f match {
+ case ("number", _: JsNumber | JsNull) => true
+ case ("string", _: JsString | JsNull) => true
+ case ("date", JsString(d)) => DateAttributeFormat.parse(d).isDefined
+ case ("date", JsNull) => true
+    case ("date", _: JsNumber)              => true
+ case ("boolean", _: JsBoolean | JsNull) => true
+ case ("order", _: JsNumber | JsNull) => true
+ case _ => false
+ }
+
+ override def checkJsonForUpdate(subNames: Seq[String], value: JsValue): Or[JsValue, Every[AttributeError]] = {
+ val result = (subNames, value) match {
+ case (Nil, _) => checkJsonForCreation(subNames, value)
+ case (Seq(_), v) => if (objectFieldsIsValid(v)) Good(value) else formatError(JsonInputValue(value))
+ case (Seq(_, tpe), v) => if (fieldIsValid(tpe -> v)) Good(value) else formatError(JsonInputValue(value))
+ case _ => formatError(JsonInputValue(value))
+ }
+ logger.debug(s"checkJsonForUpdate($subNames, $value) ⇒ $result")
+ result
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] =
+ value match {
+ case JsonInputValue(v) => checkJsonForUpdate(subNames, v)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField =
+ nestedField(attributeName)
+
+ override def elasticTemplate(attributePath: Seq[String] = Nil): Seq[DynamicTemplateRequest] = {
+ val name = attributePath.mkString("_")
+ DynamicTemplateRequest(
+ name,
+ nestedField(name).fields(
+ longField("number"),
+ keywordField("string"),
+ dateField("date").format("epoch_millis||basic_date_time_no_millis"),
+ booleanField("boolean"),
+ longField("order")
+ )
+ ).pathMatch(attributePath.mkString(".") + ".*") :: Nil
+ }
+
+ override def definition(dblists: DBLists, attribute: Attribute[JsValue]): Seq[AttributeDefinition] =
+ dblists("custom_fields").cachedItems.flatMap { item =>
+ val itemObj = item.mapTo[JsObject]
+ for {
+ fieldName <- (itemObj \ "reference").asOpt[String]
+ tpe <- (itemObj \ "type").asOpt[String]
+ description <- (itemObj \ "description").asOpt[String]
+ options <- (itemObj \ "options").asOpt[Seq[JsString]]
+ } yield AttributeDefinition(s"${attribute.attributeName}.$fieldName.$tpe", tpe, description, options, Nil)
+ }
+}
+
+object CustomAttributeFormat extends CustomAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/DateAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/DateAttributeFormat.scala
new file mode 100644
index 000000000..b3dc9e4fd
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/DateAttributeFormat.scala
@@ -0,0 +1,45 @@
+package org.elastic4play.models
+
+import java.text.SimpleDateFormat
+import java.util.Date
+import com.sksamuel.elastic4s.ElasticDsl.dateField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsNumber, JsString, JsValue}
+
+import scala.util.Try
+
+class DateAttributeFormat extends AttributeFormat[Date]("date") {
+
+ def parse(d: String): Option[Date] =
+ Try {
+ val datePattern = "yyyyMMdd'T'HHmmssZ"
+ val df = new SimpleDateFormat(datePattern)
+ df.setLenient(false)
+ df.parse(d)
+ }.orElse(Try(new Date(d.toLong))).toOption
+
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case JsString(v) if subNames.isEmpty => parse(v).map(_ => Good(value)).getOrElse(formatError(JsonInputValue(value)))
+ case JsNumber(_) if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Date Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else {
+ value match {
+ case StringInputValue(Seq(v)) => parse(v).map(Good(_)).getOrElse(formatError(value))
+ case JsonInputValue(JsString(v)) => parse(v).map(Good(_)).getOrElse(formatError(value))
+ case JsonInputValue(JsNumber(v)) => Good(new Date(v.toLong))
+ case _ => formatError(value)
+ }
+ }
+
+ override def elasticType(attributeName: String): ElasticField = dateField(attributeName).format("epoch_millis||basic_date_time_no_millis")
+}
+
+object DateAttributeFormat extends DateAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/EnumerationAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/EnumerationAttributeFormat.scala
new file mode 100644
index 000000000..2c615e4f9
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/EnumerationAttributeFormat.scala
@@ -0,0 +1,45 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.services.DBLists
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{Format, JsString, JsValue}
+
+case class EnumerationAttributeFormat[T <: Enumeration](e: T)(implicit format: Format[T#Value]) extends AttributeFormat[T#Value](s"enumeration") {
+
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case JsString(v) if subNames.isEmpty =>
+ try {
+ e.withName(v); Good(value)
+ } catch {
+ case _: Throwable => formatError(JsonInputValue(value))
+ }
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): T#Value Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) =>
+ try Good(e.withName(v))
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case JsonInputValue(JsString(v)) =>
+ try Good(e.withName(v))
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = keywordField(attributeName)
+
+ override def definition(dblists: DBLists, attribute: Attribute[T#Value]): Seq[AttributeDefinition] =
+ Seq(AttributeDefinition(attribute.attributeName, name, attribute.description, e.values.toSeq.map(v => JsString(v.toString)), Nil))
+}
diff --git a/elastic4play/app/org/elastic4play/models/Errors.scala b/elastic4play/app/org/elastic4play/models/Errors.scala
new file mode 100644
index 000000000..945f5a309
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/Errors.scala
@@ -0,0 +1,12 @@
+package org.elastic4play.models
+
+import scala.util.Try
+
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.{JsObject, JsString, Json}
+
+case class InvalidEntityAttributes[M <: BaseModelDef, T](model: M, name: String, format: AttributeFormat[T], attributes: JsObject)
+ extends Exception(
+ s"Entity is not conform to its model ${model.modelName} : missing attribute $name of type ${format.name}\n" +
+ s"${Json.prettyPrint(attributes)}\n =⇒ ${Try(format.jsFormat.reads((attributes \ name).as[JsString]))}"
+ )
diff --git a/elastic4play/app/org/elastic4play/models/HashAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/HashAttributeFormat.scala
new file mode 100644
index 000000000..2a21a9818
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/HashAttributeFormat.scala
@@ -0,0 +1,29 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+object HashAttributeFormat extends AttributeFormat[String]("hash") {
+ val validDigits = "0123456789abcdefABCDEF"
+
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case JsString(v) if subNames.isEmpty && v.forall(c => validDigits.contains(c)) => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): String Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) if v.forall(c => validDigits.contains(c)) => Good(v.toLowerCase)
+ case JsonInputValue(JsString(v)) if v.forall(c => validDigits.contains(c)) => Good(v.toLowerCase)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = keywordField(attributeName)
+}
diff --git a/elastic4play/app/org/elastic4play/models/HiveEnumeration.scala b/elastic4play/app/org/elastic4play/models/HiveEnumeration.scala
new file mode 100644
index 000000000..eef0df242
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/HiveEnumeration.scala
@@ -0,0 +1,12 @@
+package org.elastic4play.models
+
+trait HiveEnumeration { self: Enumeration =>
+
+ def getByName(name: String): Value =
+ try {
+ withName(name)
+ } catch {
+      case _: NoSuchElementException => // TODO: raise BadRequestError instead of sys.error
+ sys.error(s"$name is invalid for $toString. Correct values are ${values.mkString(", ")}")
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/models/JsonFormat.scala b/elastic4play/app/org/elastic4play/models/JsonFormat.scala
new file mode 100644
index 000000000..47add644a
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/JsonFormat.scala
@@ -0,0 +1,44 @@
+package org.elastic4play.models
+
+import play.api.libs.json._
+
+object JsonFormat {
+ implicit def baseModelEntityWrites[E <: BaseEntity]: Writes[E] = Writes((entity: BaseEntity) => entity.toJson)
+
+ implicit def multiFormat[T](implicit jsFormat: Format[T]): Format[Seq[T]] = Format(Reads.seq(jsFormat), Writes.seq(jsFormat))
+
+ private def optionReads[T](implicit jsReads: Reads[T]) = Reads[Option[T]] {
+ case JsNull => JsSuccess(None)
+ case json => jsReads.reads(json).map(v => Some(v))
+ }
+
+ implicit def optionFormat[T](implicit jsFormat: Format[T]): Format[Option[T]] = Format(optionReads, Writes.OptionWrites)
+
+ def enumReads[E <: Enumeration with HiveEnumeration](e: E): Reads[E#Value] =
+ Reads((json: JsValue) =>
+ json match {
+ case JsString(s) =>
+ import scala.util.Try
+ Try(JsSuccess(e.getByName(s)))
+ .orElse(Try(JsSuccess(e.getByName(s.toLowerCase))))
+ .getOrElse(JsError(s"Enumeration expected of type: '${e.getClass}', but it does not appear to contain the value: '$s'"))
+ case _ => JsError("String value expected")
+ }
+ )
+
+ def enumWrites[E <: Enumeration]: Writes[E#Value] = Writes((v: E#Value) => JsString(v.toString))
+
+ def enumFormat[E <: Enumeration with HiveEnumeration](e: E): Format[E#Value] =
+ Format(enumReads(e), enumWrites)
+
+ private val binaryReads = Reads.apply {
+ case JsString(s) => JsSuccess(java.util.Base64.getDecoder.decode(s))
+ case _ => JsError("")
+ }
+ private val binaryWrites = Writes.apply { bin: Array[Byte] =>
+ JsString(java.util.Base64.getEncoder.encodeToString(bin))
+ }
+ val binaryFormats: Format[Array[Byte]] = Format(binaryReads, binaryWrites)
+
+ implicit val attributeDefinitionWrites: OWrites[AttributeDefinition] = Json.writes[AttributeDefinition]
+}
diff --git a/elastic4play/app/org/elastic4play/models/ListEnumerationAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/ListEnumerationAttributeFormat.scala
new file mode 100644
index 000000000..179364288
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/ListEnumerationAttributeFormat.scala
@@ -0,0 +1,32 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.services.DBLists
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+case class ListEnumerationAttributeFormat(enumerationName: String)(dblists: DBLists) extends AttributeFormat[String](s"enumeration") {
+ def items: Set[String] = dblists("list_" + enumerationName).cachedItems.map(_.mapTo[String]).toSet
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case JsString(v) if subNames.isEmpty && items.contains(v) => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): String Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) if items.contains(v) => Good(v)
+ case JsonInputValue(JsString(v)) if items.contains(v) => Good(v)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = keywordField(attributeName)
+
+ override def definition(dblists: DBLists, attribute: Attribute[String]): Seq[AttributeDefinition] =
+ Seq(AttributeDefinition(attribute.attributeName, name, attribute.description, items.map(JsString.apply).toSeq, Nil))
+}
diff --git a/elastic4play/app/org/elastic4play/models/MetricsAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/MetricsAttributeFormat.scala
new file mode 100644
index 000000000..713a8e0ce
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/MetricsAttributeFormat.scala
@@ -0,0 +1,51 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.nestedField
+import com.sksamuel.elastic4s.fields.{ElasticField, LongField}
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.AttributeError
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.services.DBLists
+import org.scalactic.Accumulation._
+import org.scalactic._
+import play.api.libs.json._
+
+class MetricsAttributeFormat extends AttributeFormat[JsValue]("metrics") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, Every[AttributeError]] = fromInputValue(subNames, JsonInputValue(value))
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] =
+ if (subNames.isEmpty) {
+ value match {
+ case JsonInputValue(v: JsObject) =>
+ v.fields
+ .validatedBy {
+ case (_, _: JsNumber) => Good(())
+ case (_, JsNull) => Good(())
+ case _ => formatError(value)
+ }
+ .map(_ => v)
+ case _ => formatError(value)
+ }
+ } else {
+ OptionalAttributeFormat(NumberAttributeFormat).inputValueToJson(subNames.tail, value) //.map(v ⇒ JsObject(Seq(subNames.head → v)))
+ }
+
+ override def elasticType(attributeName: String): ElasticField = nestedField(attributeName)
+
+ override def elasticTemplate(attributePath: Seq[String]): Seq[DynamicTemplateRequest] = {
+ val name = attributePath.mkString("_")
+ DynamicTemplateRequest(name, LongField(name))
+ .pathMatch(attributePath.mkString(".") + ".*") :: Nil
+ }
+
+ override def definition(dblists: DBLists, attribute: Attribute[JsValue]): Seq[AttributeDefinition] =
+ dblists("case_metrics").cachedItems.flatMap { item =>
+ val itemObj = item.mapTo[JsObject]
+ for {
+ fieldName <- (itemObj \ "name").asOpt[String]
+ description <- (itemObj \ "description").asOpt[String]
+ } yield AttributeDefinition(s"${attribute.attributeName}.$fieldName", "number", description, Nil, Nil)
+ }
+}
+
+object MetricsAttributeFormat extends MetricsAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/ModelDef.scala b/elastic4play/app/org/elastic4play/models/ModelDef.scala
new file mode 100644
index 000000000..dcc74bf76
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/ModelDef.scala
@@ -0,0 +1,238 @@
+package org.elastic4play.models
+
+import java.util.Date
+
+import scala.concurrent.Future
+import scala.language.higherKinds
+
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.{JsObject, JsString, Json}
+
+import org.elastic4play.InternalError
+
+trait AttributeDef {
+ type A[B]
+
+ def attribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => T],
+ options: AttributeOption.Type*
+ ): A[T]
+
+ def attribute[T](attributeName: String, format: AttributeFormat[T], description: String, defaultValue: => T, options: AttributeOption.Type*): A[T] =
+ attribute(attributeName, format, description, Some(() => defaultValue), options: _*)
+
+ def attribute[T](attributeName: String, format: AttributeFormat[T], description: String, options: AttributeOption.Type*): A[T] =
+ attribute(attributeName, format, description, None, options: _*)
+
+ def multiAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Seq[T]],
+ options: AttributeOption.Type*
+ ): A[Seq[T]]
+
+ def multiAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Seq[T],
+ options: AttributeOption.Type*
+ ): A[Seq[T]] =
+ multiAttribute(attributeName, format, description, Some(() => defaultValue), options: _*)
+
+ def multiAttribute[T](attributeName: String, format: AttributeFormat[T], description: String, options: AttributeOption.Type*): A[Seq[T]] =
+ multiAttribute(attributeName, format, description, None, options: _*)
+
+ def optionalAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Option[T]],
+ options: AttributeOption.Type*
+ ): A[Option[T]]
+
+ def optionalAttribute[T](attributeName: String, format: AttributeFormat[T], description: String, options: AttributeOption.Type*): A[Option[T]] =
+ optionalAttribute(attributeName, format, description, None: Option[() => Option[T]], options: _*)
+}
+
+abstract class ModelAttributes(val modelName: String) extends AttributeDef {
+ type A[B] = Attribute[B]
+ private var _attributes: Seq[Attribute[_]] = Nil
+ def attributes: Seq[Attribute[_]] = _attributes
+
+ /* attribute creation helper */
+ def attribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => T],
+ options: AttributeOption.Type*
+ ): Attribute[T] = {
+ val attr = Attribute(modelName, attributeName, format, options, defaultValue, description: String)
+ _attributes = attr +: _attributes
+ attr
+ }
+
+ def multiAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Seq[T]],
+ options: AttributeOption.Type*
+ ): Attribute[Seq[T]] = {
+ val attr = Attribute(modelName, attributeName, MultiAttributeFormat(format), options, defaultValue, description: String)
+ _attributes = attr +: _attributes
+ attr
+ }
+
+ def optionalAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Option[T]],
+ options: AttributeOption.Type*
+ ): Attribute[Option[T]] = {
+ val attr = Attribute(modelName, attributeName, OptionalAttributeFormat(format), options, defaultValue, description: String)
+ _attributes = attr +: _attributes
+ attr
+ }
+
+ val createdBy: Attribute[String] =
+ attribute("createdBy", AttributeFormat.userFmt, "user who created this entity", None, AttributeOption.model, AttributeOption.readonly)
+
+ val createdAt: Attribute[Date] =
+ attribute("createdAt", AttributeFormat.dateFmt, "user who created this entity", new Date, AttributeOption.model, AttributeOption.readonly)
+
+ val updatedBy: Attribute[Option[String]] =
+ optionalAttribute("updatedBy", AttributeFormat.userFmt, "user who created this entity", None, AttributeOption.model)
+
+ val updatedAt: Attribute[Option[Date]] =
+ optionalAttribute("updatedAt", AttributeFormat.dateFmt, "user who created this entity", AttributeOption.model)
+}
+
+abstract class BaseModelDef(modelName: String, val label: String, val path: String) extends ModelAttributes(modelName) {
+ def apply(attributes: JsObject): BaseEntity
+ def removeAttribute: JsObject = throw InternalError(s"$modelName can't be removed")
+
+ /* default sort parameter used in List and Search controllers */
+ def defaultSortBy: Seq[String] = Nil
+
+ /* get attributes definitions for the entity (form, model, required and default values) */
+ def formAttributes: Map[String, Attribute[_]] =
+ attributes.collect { case a if a.isForm => a.attributeName -> a }.toMap
+
+  /* get attribute definitions that belong to the datastore model (isModel) */
+ def modelAttributes: Map[String, Attribute[_]] =
+ attributes.collect { case a if a.isModel => a.attributeName -> a }.toMap
+
+ lazy val attachmentAttributes: Map[String, Boolean] = formAttributes
+ .filter(_._2.format match {
+ case `AttachmentAttributeFormat` => true
+ case OptionalAttributeFormat(fmt) if fmt == AttachmentAttributeFormat => true
+ case MultiAttributeFormat(fmt) if fmt == AttachmentAttributeFormat => true
+ case _ => false
+ })
+ .view
+ .mapValues(_.isRequired)
+ .toMap
+
+  /* this hook, executed on creation, can be overridden by subclasses in order to transform entity attributes */
+ def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful(attrs)
+
+  /* this hook, executed on update, can be overridden by subclasses in order to transform entity attributes */
+ def updateHook(entity: BaseEntity, updateAttrs: JsObject): Future[JsObject] = Future.successful(updateAttrs)
+
+ def getStats(entity: BaseEntity): Future[JsObject] = Future.successful(JsObject.empty)
+
+ val computedMetrics = Map.empty[String, String]
+}
+
+class BaseEntity(val model: BaseModelDef, val attributes: JsObject) {
+ val id: String = (attributes \ "_id").as[String]
+ val routing: String = (attributes \ "_routing").as[String]
+ lazy val parentId: Option[String] = (attributes \ "_parent").asOpt[String]
+ val seqNo: Long = (attributes \ "_seqNo").as[Long]
+ val primaryTerm: Long = (attributes \ "_primaryTerm").as[Long]
+ def createdBy: String = (attributes \ "createdBy").as[String]
+ def createdAt: Date = (attributes \ "createdAt").as[Date]
+ def updatedBy: String = (attributes \ "updatedBy").as[String]
+ def updatedAt: Date = (attributes \ "updatedAt").as[Date]
+
+ @inline
+ final private def removeProtectedAttributes(attrs: JsObject) = JsObject {
+ attrs
+ .fields
+ .map { case (name, value) => (name, value, model.attributes.find(_.attributeName == name)) }
+ .collect {
+ case (name, value, Some(desc)) if !desc.isSensitive => name -> value
+ case (name, value, _) if name.startsWith("_") => name -> value
+ }
+ }
+
+ def toJson: JsObject =
+ removeProtectedAttributes(attributes) +
+ ("id" -> JsString(id))
+
+ /* compute auxiliary data */
+ override def toString: String = Json.prettyPrint(toJson)
+}
+
+abstract class EntityDef[M <: BaseModelDef, E <: BaseEntity](model: M, attributes: JsObject) extends BaseEntity(model, attributes) with AttributeDef {
+ self: E =>
+ type A[B] = () => B
+
+ def attribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => T],
+ options: AttributeOption.Type*
+ ): A[T] = { () =>
+ (attributes \ attributeName).asOpt[T](format.jsFormat).getOrElse(throw InvalidEntityAttributes[M, T](model, attributeName, format, attributes))
+ }
+
+ def multiAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Seq[T]],
+ options: AttributeOption.Type*
+ ): A[Seq[T]] = { () =>
+ (attributes \ attributeName).asOpt[Seq[T]](MultiAttributeFormat(format).jsFormat).getOrElse(Nil)
+ }
+
+ def optionalAttribute[T](
+ attributeName: String,
+ format: AttributeFormat[T],
+ description: String,
+ defaultValue: Option[() => Option[T]],
+ options: AttributeOption.Type*
+ ): A[Option[T]] = { () =>
+ (attributes \ attributeName).asOpt[T](format.jsFormat)
+ }
+}
+
+abstract class AbstractModelDef[M <: AbstractModelDef[M, E], E <: BaseEntity](modelName: String, label: String, path: String)
+ extends BaseModelDef(modelName, label, path) {
+ override def apply(attributes: JsObject): E
+}
+
+abstract class ModelDef[M <: ModelDef[M, E], E <: BaseEntity](modelName: String, label: String, path: String)(implicit e: Manifest[E])
+ extends AbstractModelDef[M, E](modelName, label, path) { self: M =>
+ override def apply(attributes: JsObject): E =
+ e.runtimeClass.getConstructor(getClass, classOf[JsObject]).newInstance(self, attributes).asInstanceOf[E]
+}
+abstract class ChildModelDef[M <: ChildModelDef[M, E, PM, PE], E <: BaseEntity, PM <: BaseModelDef, PE <: BaseEntity](
+ val parentModel: PM,
+ modelName: String,
+ label: String,
+ path: String
+)(implicit e: Manifest[E])
+ extends AbstractModelDef[M, E](modelName, label, path) { self: M =>
+ override def apply(attributes: JsObject): E =
+ e.runtimeClass.getConstructor(getClass, classOf[JsObject]).newInstance(self, attributes).asInstanceOf[E]
+}
diff --git a/elastic4play/app/org/elastic4play/models/MultiAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/MultiAttributeFormat.scala
new file mode 100644
index 000000000..0663d6132
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/MultiAttributeFormat.scala
@@ -0,0 +1,50 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.fields.ElasticField
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.AttributeError
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.models.JsonFormat.multiFormat
+import org.elastic4play.services.DBLists
+import org.scalactic.Accumulation._
+import org.scalactic._
+import play.api.libs.json.{JsArray, JsValue}
+
+case class MultiAttributeFormat[T](attributeFormat: AttributeFormat[T])
+ extends AttributeFormat[Seq[T]]("multi-" + attributeFormat.name)(multiFormat(attributeFormat.jsFormat)) {
+ override def checkJsonForCreation(subNames: Seq[String], value: JsValue): Or[JsArray, Every[AttributeError]] = value match {
+ case JsArray(values) if subNames.isEmpty => values.validatedBy(v => attributeFormat.checkJsonForCreation(Nil, v)).map(JsArray)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def checkJsonForUpdate(subNames: Seq[String], value: JsValue): Or[JsArray, Every[AttributeError]] = value match {
+ case JsArray(values) if subNames.isEmpty => values.validatedBy(v => attributeFormat.checkJsonForUpdate(Nil, v)).map(JsArray)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsArray, Every[AttributeError]] = value match {
+ case JsArray(values) if subNames.isEmpty => values.validatedBy(v => attributeFormat.checkJsonForUpdate(Nil, v)).map(JsArray)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def inputValueToJson(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] = value match {
+ case JsonInputValue(JsArray(xs)) => xs.map(x => JsonInputValue(x)).validatedBy(i => attributeFormat.inputValueToJson(subNames, i)).map(JsArray)
+ case StringInputValue(xs) =>
+ xs.filterNot(_.isEmpty).map(x => StringInputValue(x :: Nil)).validatedBy(i => attributeFormat.inputValueToJson(subNames, i)).map(JsArray.apply)
+ case _ => formatError(value)
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Seq[T] Or Every[AttributeError] = value match {
+ case JsonInputValue(JsArray(xs)) => xs.map(JsonInputValue).toSeq.validatedBy(i => attributeFormat.fromInputValue(subNames, i))
+ case StringInputValue(xs) =>
+ xs.filterNot(_.isEmpty).map(x => StringInputValue(x :: Nil)).validatedBy(i => attributeFormat.fromInputValue(subNames, i))
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = attributeFormat.elasticType(attributeName)
+
+ override def elasticTemplate(attributePath: Seq[String]): Seq[DynamicTemplateRequest] = attributeFormat.elasticTemplate(attributePath)
+
+ override def definition(dblists: DBLists, attribute: Attribute[Seq[T]]): Seq[AttributeDefinition] =
+ attributeFormat.definition(dblists, attribute.asInstanceOf[Attribute[T]])
+}
diff --git a/elastic4play/app/org/elastic4play/models/NumberAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/NumberAttributeFormat.scala
new file mode 100644
index 000000000..835f2b01a
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/NumberAttributeFormat.scala
@@ -0,0 +1,34 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.longField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsNumber, JsValue}
+
+class NumberAttributeFormat extends AttributeFormat[Long]("number") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case _: JsNumber if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Long Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) =>
+ try Good(v.toLong)
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case JsonInputValue(JsNumber(v)) => Good(v.longValue)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = longField(attributeName)
+
+}
+
+object NumberAttributeFormat extends NumberAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/ObjectAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/ObjectAttributeFormat.scala
new file mode 100644
index 000000000..54cdc5414
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/ObjectAttributeFormat.scala
@@ -0,0 +1,110 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.nestedField
+import com.sksamuel.elastic4s.fields.ElasticField
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.controllers.JsonFormat.inputValueFormat
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.services.DBLists
+import org.elastic4play.{AttributeError, UnknownAttributeError}
+import org.scalactic.Accumulation._
+import org.scalactic._
+import play.api.Logger
+import play.api.libs.json._
+
+case class ObjectAttributeFormat(subAttributes: Seq[Attribute[_]]) extends AttributeFormat[JsObject]("nested") {
+ private[ObjectAttributeFormat] lazy val logger = Logger(getClass)
+
+ override def checkJson(subNames: Seq[String], value: JsValue): JsObject Or Every[AttributeError] = checkJsonForCreation(subNames, value)
+
+ override def checkJsonForCreation(subNames: Seq[String], value: JsValue): JsObject Or Every[AttributeError] = {
+ val result = value match {
+ case obj: JsObject if subNames.isEmpty =>
+ subAttributes
+ .validatedBy { attr =>
+ attr.validateForCreation((value \ attr.attributeName).asOpt[JsValue])
+ }
+ .map { _ =>
+ obj
+ }
+ case _ => formatError(JsonInputValue(value))
+ }
+ logger.debug(s"checkJsonForCreation($subNames, $value) ⇒ $result")
+ result
+ }
+
+ override def checkJsonForUpdate(subNames: Seq[String], value: JsValue): JsObject Or Every[AttributeError] =
+ value match {
+ case obj: JsObject if subNames.isEmpty =>
+ obj
+ .fields
+ .validatedBy {
+ case (_name, v) =>
+ subAttributes
+ .find(_.attributeName == _name)
+ .map(_.validateForUpdate(subNames, v))
+ .getOrElse(Bad(One(UnknownAttributeError(_name, v))))
+ }
+ .map { _ =>
+ obj
+ }
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): JsObject Or Every[AttributeError] = {
+ val result = subNames
+ .headOption
+ .map { subName =>
+ subAttributes
+ .find(_.attributeName == subName)
+ .map { subAttribute =>
+ value.jsonValue match {
+ case jsvalue @ (JsNull | JsArray(Seq())) => Good(JsObject(Seq(subName -> jsvalue)))
+ case _ =>
+ subAttribute
+ .format
+ .inputValueToJson(subNames.tail, value)
+ .map(v => JsObject(Seq(subName -> v)))
+ .badMap { errors =>
+ errors.map(e => e.withName(name + "." + e.name))
+ }
+ }
+ }
+ .getOrElse(Bad(One(UnknownAttributeError(name, value.jsonValue))))
+ }
+ .getOrElse {
+ value match {
+ case JsonInputValue(v: JsObject) =>
+ v.fields
+ .validatedBy {
+ case (_, jsvalue) if jsvalue == JsNull || jsvalue == JsArray(Nil) => Good(jsvalue)
+ case (_name, jsvalue) =>
+ subAttributes
+ .find(_.attributeName == _name)
+ .map(_.format.fromInputValue(Nil, JsonInputValue(jsvalue)))
+ .getOrElse(Bad(One(UnknownAttributeError(_name, Json.toJson(value)))))
+ }
+ .map { _ =>
+ v
+ }
+ case _ => formatError(value)
+ }
+ }
+ logger.debug(s"fromInputValue($subNames, $value) ⇒ $result")
+ result
+ }
+
+ override def elasticType(attributeName: String): ElasticField = nestedField(attributeName).fields(subAttributes.map(_.elasticMapping))
+
+ override def elasticTemplate(attributePath: Seq[String]): Seq[DynamicTemplateRequest] =
+ subAttributes.flatMap(_.elasticTemplate(attributePath))
+
+ override def definition(dblists: DBLists, attribute: Attribute[JsObject]): Seq[AttributeDefinition] =
+ subAttributes
+ .flatMap {
+ case subAttribute: Attribute[tpe] => subAttribute.format.definition(dblists, subAttribute)
+ }
+ .map { attributeDefinition =>
+ attributeDefinition.copy(name = s"${attribute.attributeName}.${attributeDefinition.name}")
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/models/OptionalAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/OptionalAttributeFormat.scala
new file mode 100644
index 000000000..b473d3388
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/OptionalAttributeFormat.scala
@@ -0,0 +1,35 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.fields.ElasticField
+import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.DynamicTemplateRequest
+import org.elastic4play.AttributeError
+import org.elastic4play.controllers.{InputValue, JsonInputValue, NullInputValue}
+import org.elastic4play.models.JsonFormat.optionFormat
+import org.elastic4play.services.DBLists
+import org.scalactic._
+import play.api.libs.json.{JsNull, JsValue}
+
+case class OptionalAttributeFormat[T](attributeFormat: AttributeFormat[T])
+ extends AttributeFormat[Option[T]]("optional-" + attributeFormat.name)(optionFormat(attributeFormat.jsFormat)) {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, Every[AttributeError]] = value match {
+ case JsNull if subNames.isEmpty => Good(value)
+ case _ => attributeFormat.checkJson(subNames, value)
+ }
+
+ override def inputValueToJson(subNames: Seq[String], value: InputValue): JsValue Or Every[AttributeError] = value match {
+ case NullInputValue | JsonInputValue(JsNull) => Good(JsNull)
+ case x => attributeFormat.inputValueToJson(subNames, x)
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): Option[T] Or Every[AttributeError] = value match {
+ case NullInputValue => Good(None)
+ case x => attributeFormat.fromInputValue(subNames, x).map(v => Some(v))
+ }
+
+ override def elasticType(attributeName: String): ElasticField = attributeFormat.elasticType(attributeName)
+
+ override def elasticTemplate(attributePath: Seq[String]): Seq[DynamicTemplateRequest] = attributeFormat.elasticTemplate(attributePath)
+
+ override def definition(dblists: DBLists, attribute: Attribute[Option[T]]): Seq[AttributeDefinition] =
+ attributeFormat.definition(dblists, attribute.asInstanceOf[Attribute[T]])
+}
diff --git a/elastic4play/app/org/elastic4play/models/RawAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/RawAttributeFormat.scala
new file mode 100644
index 000000000..178b6bd79
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/RawAttributeFormat.scala
@@ -0,0 +1,24 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.binaryField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+class RawAttributeFormat extends AttributeFormat[String]("raw") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case _: JsString if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+ override def fromInputValue(subNames: Seq[String], value: InputValue): String Or Every[AttributeError] =
+ TextAttributeFormat.fromInputValue(subNames, value) match {
+ case Bad(One(ifae: InvalidFormatAttributeError)) => Bad(One(ifae.copy(format = name)))
+ case other => other
+ }
+
+ override def elasticType(attributeName: String): ElasticField = binaryField(attributeName)
+}
+
+object RawAttributeFormat extends RawAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/StringAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/StringAttributeFormat.scala
new file mode 100644
index 000000000..137d89347
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/StringAttributeFormat.scala
@@ -0,0 +1,25 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+class StringAttributeFormat extends AttributeFormat[String]("string") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case _: JsString if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): String Or Every[AttributeError] =
+ TextAttributeFormat.fromInputValue(subNames, value) match {
+ case Bad(One(ifae: InvalidFormatAttributeError)) => Bad(One(ifae.copy(format = name)))
+ case other => other
+ }
+
+ override def elasticType(attributeName: String): ElasticField = keywordField(attributeName)
+}
+
+object StringAttributeFormat extends StringAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/TextAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/TextAttributeFormat.scala
new file mode 100644
index 000000000..af1e36dcf
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/TextAttributeFormat.scala
@@ -0,0 +1,29 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.textField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+class TextAttributeFormat extends AttributeFormat[String]("text") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case _: JsString if subNames.isEmpty => Good(value)
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): String Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) => Good(v)
+ case JsonInputValue(JsString(v)) => Good(v)
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = textField(attributeName).fielddata(true)
+}
+
+object TextAttributeFormat extends TextAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/UUIDAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/UUIDAttributeFormat.scala
new file mode 100644
index 000000000..a719f05bb
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/UUIDAttributeFormat.scala
@@ -0,0 +1,44 @@
+package org.elastic4play.models
+
+import com.sksamuel.elastic4s.ElasticDsl.keywordField
+import com.sksamuel.elastic4s.fields.ElasticField
+import org.elastic4play.controllers.{InputValue, JsonInputValue, StringInputValue}
+import org.elastic4play.{AttributeError, InvalidFormatAttributeError}
+import org.scalactic._
+import play.api.libs.json.{JsString, JsValue}
+
+import java.util.UUID
+
+class UUIDAttributeFormat extends AttributeFormat[UUID]("uuid") {
+ override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match {
+ case JsString(v) if subNames.isEmpty =>
+ try {
+ UUID.fromString(v); Good(value)
+ } catch {
+ case _: Throwable => formatError(JsonInputValue(value))
+ }
+ case _ => formatError(JsonInputValue(value))
+ }
+
+ override def fromInputValue(subNames: Seq[String], value: InputValue): UUID Or Every[AttributeError] =
+ if (subNames.nonEmpty)
+ formatError(value)
+ else
+ value match {
+ case StringInputValue(Seq(v)) =>
+ try Good(UUID.fromString(v))
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case JsonInputValue(JsString(v)) =>
+ try Good(UUID.fromString(v))
+ catch {
+ case _: Throwable => formatError(value)
+ }
+ case _ => formatError(value)
+ }
+
+ override def elasticType(attributeName: String): ElasticField = keywordField(attributeName)
+}
+
+object UUIDAttributeFormat extends UUIDAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/models/UserAttributeFormat.scala b/elastic4play/app/org/elastic4play/models/UserAttributeFormat.scala
new file mode 100644
index 000000000..4865eec69
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/models/UserAttributeFormat.scala
@@ -0,0 +1,7 @@
+package org.elastic4play.models
+
+class UserAttributeFormat extends StringAttributeFormat {
+ override val name: String = "user"
+}
+
+object UserAttributeFormat extends UserAttributeFormat
diff --git a/elastic4play/app/org/elastic4play/services/Aggregations.scala b/elastic4play/app/org/elastic4play/services/Aggregations.scala
new file mode 100644
index 000000000..4687c6964
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/Aggregations.scala
@@ -0,0 +1,292 @@
+package org.elastic4play.services
+
+import scala.util.Try
+import play.api.libs.json._
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.JacksonSupport
+import com.sksamuel.elastic4s.requests.script.Script
+import com.sksamuel.elastic4s.requests.searches.DateHistogramInterval
+import com.sksamuel.elastic4s.requests.searches.aggs.{
+ Aggregation,
+ AvgAggregation,
+ KeyedFiltersAggregation,
+ MaxAggregation,
+ MinAggregation,
+ SumAggregation,
+ TermsAggregation,
+ TermsOrder
+}
+import com.sksamuel.elastic4s.requests.searches.aggs.responses.HasAggregations
+import com.sksamuel.elastic4s.requests.searches.aggs.responses.bucket.Terms
+import com.sksamuel.elastic4s.requests.searches.aggs.responses.metrics.TopHits
+import org.elastic4play.BadRequestError
+import org.elastic4play.database.DBUtils
+import org.elastic4play.models.BaseModelDef
+
+abstract class Agg(val aggregationName: String) {
+ def apply(model: BaseModelDef): Seq[Aggregation]
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject
+}
+
+abstract class FieldAgg(val fieldName: String, aggregationName: String, query: Option[QueryDef]) extends Agg(aggregationName) {
+ def script(s: String): Aggregation
+
+ def field(f: String): Aggregation
+
+ def getAggregation(fieldName: String, aggregations: HasAggregations, query: Option[QueryDef]): HasAggregations = {
+
+ val agg = query match {
+ case None => aggregations
+ case _ => aggregations.filter(aggregationName)
+ }
+
+ if (fieldName.startsWith("computed")) agg
+ else {
+ fieldName.split("\\.").init.foldLeft(agg) { (a, _) =>
+ a.nested(aggregationName)
+ }
+ }
+ }
+
+ def apply(model: BaseModelDef): Seq[Aggregation] = {
+ val aggs = fieldName.split("\\.") match {
+ case Array("computed", c) =>
+ val s = model.computedMetrics.getOrElse(c, throw BadRequestError(s"Field $fieldName is unknown in ${model.modelName}"))
+ Seq(script(s))
+ case array =>
+ if (array(0) != "" && !model.attributes.exists(_.attributeName == array(0))) {
+ throw BadRequestError(s"Field $fieldName is unknown in ${model.modelName}")
+ }
+ // TODO check attribute type
+ Seq(
+ fieldName
+ .split("\\.")
+ .toSeq
+ .init
+ .inits
+ .toSeq
+ .init
+ .foldLeft[Aggregation](field(fieldName)) { (agg, f) =>
+ nestedAggregation(aggregationName, f.mkString(".")).subaggs(agg)
+ }
+ )
+ }
+ query match {
+ case None => aggs
+ case Some(q) => Seq(filterAgg(aggregationName, q.query).subAggregations(aggs))
+ }
+ }
+}
+
+class SelectAvg(aggregationName: String, fieldName: String, query: Option[QueryDef]) extends FieldAgg(fieldName, aggregationName, query) {
+ def script(s: String): Aggregation = AvgAggregation(aggregationName).script(Script(s).lang("painless"))
+
+ def field(f: String): Aggregation = AvgAggregation(aggregationName).field(f)
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val avg = getAggregation(fieldName, aggregations, query).avg(aggregationName)
+ val value = Try(JsNumber(avg.value)).toOption.getOrElse(JsNumber(0))
+ JsObject(Seq(avg.name -> value))
+ }
+}
+
+class SelectMin(aggregationName: String, fieldName: String, query: Option[QueryDef]) extends FieldAgg(fieldName, aggregationName, query) {
+ def script(s: String): Aggregation = MinAggregation(aggregationName).script(Script(s).lang("painless"))
+
+ def field(f: String): Aggregation = MinAggregation(aggregationName).field(f)
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val min = getAggregation(fieldName, aggregations, query).min(aggregationName)
+ val value = min.value.fold(JsNumber(0))(m => JsNumber(m))
+ JsObject(Seq(min.name -> value))
+ }
+}
+
+class SelectMax(aggregationName: String, fieldName: String, query: Option[QueryDef]) extends FieldAgg(fieldName, aggregationName, query) {
+ def script(s: String): Aggregation = MaxAggregation(aggregationName).script(Script(s).lang("painless"))
+
+ def field(f: String): Aggregation = MaxAggregation(aggregationName).field(f)
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val max = getAggregation(fieldName, aggregations, query).max(aggregationName)
+ val value = max.value.fold(JsNumber(0))(m => JsNumber(m))
+ JsObject(Seq(max.name -> value))
+ }
+}
+
+class SelectSum(aggregationName: String, fieldName: String, query: Option[QueryDef]) extends FieldAgg(fieldName, aggregationName, query) {
+ def script(s: String): Aggregation = SumAggregation(aggregationName).script(Script(s).lang("painless"))
+
+ def field(f: String): Aggregation = SumAggregation(aggregationName).field(f)
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val sum = getAggregation(fieldName, aggregations, query).sum(aggregationName)
+ val value = JsNumber(sum.value)
+ JsObject(Seq(sum.name -> value))
+ }
+}
+
+class SelectCount(aggregationName: String, query: Option[QueryDef]) extends FieldAgg("", aggregationName, query) {
+ def script(s: String): Aggregation = ???
+
+ def field(f: String): Aggregation = filterAgg(aggregationName, matchAllQuery())
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val count = aggregations.filter(aggregationName)
+ JsObject(Seq(count.name -> JsNumber(count.docCount)))
+ }
+}
+
+class SelectTop(aggregationName: String, size: Int, sortBy: Seq[String], query: Option[QueryDef] = None)
+ extends FieldAgg("", aggregationName, query) {
+ def script(s: String): Aggregation = ???
+
+ def field(f: String): Aggregation = topHitsAgg(aggregationName).size(size).sortBy(DBUtils.sortDefinition(sortBy))
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val hits = aggregations.result[TopHits](aggregationName).hits.map { hit =>
+ val id = JsString(hit.id)
+ val body = Json.parse(JacksonSupport.mapper.writeValueAsString(hit.source)).as[JsObject]
+ val (parent, model) = (body \ "relations" \ "parent").asOpt[JsString] match {
+ case Some(p) => p -> (body \ "relations" \ "name").as[JsString]
+ case None => JsNull -> (body \ "relations").as[JsString]
+ }
+ body - "relations" +
+ ("_type" -> model) +
+ ("_parent" -> parent) +
+ ("_id" -> id)
+
+ }
+ Json.obj("top" -> hits)
+ }
+}
+
+class GroupByCategory(aggregationName: String, categories: Map[String, QueryDef], subAggs: Seq[Agg]) extends Agg(aggregationName) {
+
+ def apply(model: BaseModelDef): Seq[KeyedFiltersAggregation] = {
+ val filters = categories.view.mapValues(_.query).toMap
+ val subAggregations = subAggs.flatMap(_.apply(model))
+ Seq(KeyedFiltersAggregation(aggregationName, filters).subAggregations(subAggregations))
+ }
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val filters = aggregations.keyedFilters(aggregationName)
+ JsObject {
+ categories.keys.toSeq.map { cat =>
+ val subAggResults = filters.aggResults(cat)
+ cat -> subAggs
+ .map(_.processResult(model, subAggResults))
+ .reduceOption(_ ++ _)
+ .getOrElse(JsObject.empty)
+ }
+ }
+ }
+}
+
+class GroupByTime(aggregationName: String, fields: Seq[String], interval: String, subAggs: Seq[Agg]) extends Agg(aggregationName) {
+
+ def apply(model: BaseModelDef): Seq[Aggregation] =
+ fields.map { fieldName =>
+ val dateHistoAgg = dateHistogramAgg(s"${aggregationName}_$fieldName", fieldName)
+ .calendarInterval(DateHistogramInterval.fromString(interval))
+ .subAggregations(subAggs.flatMap(_.apply(model)))
+ fieldName
+ .split("\\.")
+ .toSeq
+ .init
+ .inits
+ .toSeq
+ .init
+ .foldLeft[Aggregation](dateHistoAgg) { (agg, f) =>
+ nestedAggregation(aggregationName, f.mkString(".")).subaggs(agg)
+ }
+ }
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val aggs = fields.map { fieldName =>
+ val agg = fieldName.split("\\.").init.foldLeft(aggregations) { (a, _) =>
+ a.nested(aggregationName)
+ }
+
+ val buckets = agg.histogram(s"${aggregationName}_$fieldName").buckets
+ fieldName -> buckets.map { bucket =>
+ val results = subAggs
+ .map(_.processResult(model, bucket))
+ .reduceOption(_ ++ _)
+ .getOrElse(JsObject.empty)
+ // date → obj(key{avg, min} → value)
+ bucket.key -> results
+ }.toMap
+ }.toMap
+ val keys = aggs.values.flatMap(_.keys).toSet
+ JsObject {
+ keys.map { date =>
+ date -> JsObject(aggs.map {
+ case (df, values) =>
+ df -> values.getOrElse(date, JsObject.empty)
+ })
+ }.toMap
+ }
+ }
+}
+
+class GroupByField(aggregationName: String, fieldName: String, size: Option[Int], sortBy: Seq[String], subAggs: Seq[Agg])
+ extends Agg(aggregationName) {
+ private def setSize(agg: TermsAggregation): TermsAggregation =
+ size.fold(agg)(s => agg.size(s))
+
+ private def setOrder(agg: TermsAggregation): TermsAggregation = {
+ val sortDefinition = sortBy
+ .flatMap {
+ case "_count" | "+_count" => Seq(TermsOrder("_count", asc = true))
+ case "-_count" => Seq(TermsOrder("_count", asc = false))
+ case "_term" | "+_term" => Seq(TermsOrder("_key", asc = true))
+ case "-_term" => Seq(TermsOrder("_key", asc = false))
+ case f if f.startsWith("+") => Seq(TermsOrder(f.drop(1), asc = true))
+ case f if f.startsWith("-") => Seq(TermsOrder(f.drop(1), asc = false))
+ case f if f.length() > 0 => Seq(TermsOrder(f, asc = true))
+ case _ => Nil
+ }
+ if (sortDefinition.nonEmpty)
+ agg.order(sortDefinition)
+ else
+ agg
+ }
+
+ def apply(model: BaseModelDef): Seq[Aggregation] = {
+ val agg = setSize(setOrder(termsAgg(s"${aggregationName}_$fieldName", fieldName).subAggregations(subAggs.flatMap(_.apply(model)))))
+ Seq(
+ fieldName
+ .split("\\.")
+ .toSeq
+ .init
+ .inits
+ .toSeq
+ .init
+ .foldLeft[Aggregation](agg) { (agg, f) =>
+ nestedAggregation(aggregationName, f.mkString(".")).subaggs(agg)
+ }
+ )
+ }
+
+ def processResult(model: BaseModelDef, aggregations: HasAggregations): JsObject = {
+ val buckets = fieldName
+ .split("\\.")
+ .init
+ .foldLeft(aggregations) { (a, _) =>
+ a.nested(aggregationName)
+ }
+ .result[Terms](s"${aggregationName}_$fieldName")
+ .buckets
+ JsObject {
+ buckets.map { bucket =>
+ val results = subAggs
+ .map(_.processResult(model, bucket))
+ .reduceOption(_ ++ _)
+ .getOrElse(JsObject.empty)
+ bucket.key -> results
+ }.toMap
+ }
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/services/AttachmentSrv.scala b/elastic4play/app/org/elastic4play/services/AttachmentSrv.scala
new file mode 100644
index 000000000..297be8fea
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/AttachmentSrv.scala
@@ -0,0 +1,235 @@
+package org.elastic4play.services
+
+import java.io.InputStream
+import java.nio.file.Files
+
+import akka.NotUsed
+import akka.actor.ActorSystem
+import akka.stream.Materializer
+import akka.stream.scaladsl.{FileIO, Sink, Source, StreamConverters}
+import akka.util.ByteString
+import com.sksamuel.elastic4s.ElasticDsl.search
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.controllers.JsonFormat.{attachmentInputValueReads, fileInputValueFormat}
+import org.elastic4play.controllers.{AttachmentInputValue, FileInputValue, JsonInputValue}
+import org.elastic4play.database.{DBCreate, DBFind, DBRemove}
+import org.elastic4play.models.{AttributeDef, BaseModelDef, EntityDef, ModelDef, AttributeFormat => F}
+import org.elastic4play.services.JsonFormat.attachmentFormat
+import org.elastic4play.utils.{Hash, Hasher, Retry}
+import org.elastic4play.{AttributeCheckingError, InvalidFormatAttributeError, MissingAttributeError}
+import play.api.Configuration
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json._
+
+import scala.concurrent.duration.DurationInt
+import scala.concurrent.{ExecutionContext, Future}
+
+case class Attachment(name: String, hashes: Seq[Hash], size: Long, contentType: String, id: String)
+
+object Attachment {
+
+ def apply(id: String, hashes: Seq[Hash], fiv: FileInputValue): Attachment =
+ Attachment(fiv.name, hashes, Files.size(fiv.filepath), fiv.contentType, id)
+}
+
+trait AttachmentAttributes {
+ _: AttributeDef =>
+ val data: A[Array[Byte]] = attribute("binary", F.binaryFmt, "data")
+}
+
+@Singleton
+class AttachmentModel(datastoreName: String)
+ extends ModelDef[AttachmentModel, AttachmentChunk](datastoreName, "Attachment", "/datastore")
+ with AttachmentAttributes {
+ @Inject() def this(configuration: Configuration) = this(configuration.get[String]("datastore.name"))
+}
+
+class AttachmentChunk(model: AttachmentModel, attributes: JsObject)
+ extends EntityDef[AttachmentModel, AttachmentChunk](model, attributes)
+ with AttachmentAttributes
+
+@Singleton
+class AttachmentSrv(
+ mainHash: String,
+ extraHashes: Seq[String],
+ chunkSize: Int,
+ dbCreate: DBCreate,
+ dbRemove: DBRemove,
+ dbFind: DBFind,
+ getSrv: GetSrv,
+ findSrv: FindSrv,
+ attachmentModel: AttachmentModel,
+ implicit val system: ActorSystem,
+ implicit val mat: Materializer
+) {
+
+ @Inject() def this(
+ configuration: Configuration,
+ dbCreate: DBCreate,
+ dbRemove: DBRemove,
+ getSrv: GetSrv,
+ dbFind: DBFind,
+ findSrv: FindSrv,
+ attachmentModel: AttachmentModel,
+ system: ActorSystem,
+ mat: Materializer
+ ) =
+ this(
+ configuration.get[String]("datastore.hash.main"),
+ configuration.get[Seq[String]]("datastore.hash.extra"),
+ configuration.underlying.getBytes("datastore.chunksize").toInt,
+ dbCreate,
+ dbRemove,
+ dbFind,
+ getSrv,
+ findSrv,
+ attachmentModel,
+ system,
+ mat
+ )
+
+ val mainHasher: Hasher = Hasher(mainHash)
+ val extraHashers: Hasher = Hasher(mainHash +: extraHashes: _*)
+
+ /**
+ * Handles attachments : send to datastore and build an object with hash and filename
+ */
+ def apply(model: BaseModelDef)(attributes: JsObject)(implicit ec: ExecutionContext): Future[JsObject] =
+ // find all declared attribute as attachment in submitted data
+ model.attachmentAttributes.foldLeft(Future.successful(attributes)) {
+ case (attrs, (name, isRequired)) =>
+ attrs.flatMap { a =>
+ // try to convert in FileInputValue Scala Object
+ val inputValue = (a \ name).asOpt[FileInputValue] orElse (a \ name).asOpt[AttachmentInputValue](attachmentInputValueReads)
+ inputValue
+ .map {
+ // save attachment and replace FileInputValue json representation to JsObject containing attachment attributes
+ case fiv: FileInputValue =>
+ save(fiv).map { attachment =>
+ a - name + (name -> Json.toJson(attachment))
+ }
+ case aiv: AttachmentInputValue => Future.successful(a - name + (name -> Json.toJson(aiv.toAttachment)))
+ }
+ // if conversion to FileInputValue fails, it means that attribute is missing or format is invalid
+ .getOrElse {
+ (a \ name).asOpt[JsValue] match {
+ case Some(v) if v != JsNull && v != JsArray(Nil) =>
+ Future.failed(
+ AttributeCheckingError(
+ model.modelName,
+ Seq(
+ InvalidFormatAttributeError(
+ name,
+ "attachment",
+ (a \ name).asOpt[FileInputValue].getOrElse(JsonInputValue((a \ name).as[JsValue]))
+ )
+ )
+ )
+ )
+ case _ =>
+ if (isRequired)
+ Future.failed(AttributeCheckingError(model.modelName, Seq(MissingAttributeError(name))))
+ else
+ Future.successful(a)
+ }
+ }
+ }
+ }
+
+ def save(filename: String, contentType: String, data: Array[Byte])(implicit ec: ExecutionContext): Future[Attachment] = {
+ val hash = mainHasher.fromByteArray(data).head.toString()
+ val hashes = extraHashers.fromByteArray(data)
+
+ for {
+ attachment <- Retry()(classOf[Exception]) {
+ getSrv[AttachmentModel, AttachmentChunk](attachmentModel, hash + "_0")
+ .fallbackTo { // it it doesn't exist, create it
+ Source
+ .fromIterator(() => data.grouped(chunkSize))
+ .zip(Source.unfold(0)(i => Some((i + 1) -> i)))
+ .mapAsync(5) {
+ case (buffer, index) =>
+ val data = java.util.Base64.getEncoder.encodeToString(buffer)
+ dbCreate(attachmentModel.modelName, None, Json.obj("binary" -> data, "_id" -> s"${hash}_$index"))
+ }
+ .runWith(Sink.ignore)
+ }
+ .map(_ => Attachment(filename, hashes, data.length, contentType, hash))
+ }
+ } yield attachment
+ }
+
+ def save(fiv: FileInputValue)(implicit ec: ExecutionContext): Future[Attachment] =
+ for {
+ hash <- mainHasher.fromPath(fiv.filepath).map(_.head.toString())
+ hashes <- extraHashers.fromPath(fiv.filepath)
+ attachment <- Retry()(classOf[Exception]) {
+ getSrv[AttachmentModel, AttachmentChunk](attachmentModel, hash + "_0")
+ .fallbackTo { // it it doesn't exist, create it
+ FileIO
+ .fromPath(fiv.filepath, chunkSize)
+ .zip(Source.fromIterator { () =>
+ Iterator.iterate(0)(_ + 1)
+ })
+ .mapAsync(5) {
+ case (buffer, index) =>
+ val data = java.util.Base64.getEncoder.encodeToString(buffer.toArray)
+ dbCreate(attachmentModel.modelName, None, Json.obj("binary" -> data, "_id" -> s"${hash}_$index"))
+ }
+ .runWith(Sink.ignore)
+ }
+ .map { _ =>
+ Attachment(hash, hashes, fiv)
+ }
+ }
+ } yield attachment
+
+ def source(id: String)(implicit ec: ExecutionContext): Source[ByteString, NotUsed] =
+ Source.unfoldAsync(0) { chunkNumber =>
+ getSrv[AttachmentModel, AttachmentChunk](attachmentModel, s"${id}_$chunkNumber")
+ .map { entity =>
+ Some((chunkNumber + 1, ByteString(entity.data())))
+ }
+ .recover { case _ => None }
+ }
+
+ def stream(id: String)(implicit ec: ExecutionContext): InputStream = source(id).runWith(StreamConverters.asInputStream(1.minute))
+
+ def getHashes(id: String)(implicit ec: ExecutionContext): Future[Seq[Hash]] = extraHashers.fromSource(source(id))
+
+ def getSize(id: String)(implicit ec: ExecutionContext): Future[Int] = source(id).map(_.size).runFold(0)(_ + _)
+
+ def attachmentUseCount(attachmentId: String)(implicit ec: ExecutionContext): Future[Long] = {
+ import org.elastic4play.services.QueryDSL._
+ findSrv(None, "attachment.id" ~= attachmentId, Some("0-0"), Nil)._2
+ }
+
+ def delete(id: String)(implicit ec: ExecutionContext): Future[Unit] = {
+ def removeChunks(chunkNumber: Int = 0): Future[Unit] =
+ getSrv[AttachmentModel, AttachmentChunk](attachmentModel, s"${id}_$chunkNumber")
+ .map { chunk =>
+ dbRemove(chunk)
+ }
+ .flatMap { _ =>
+ removeChunks(chunkNumber + 1)
+ }
+
+ removeChunks().recover { case _ => () }
+ }
+
+ def cleanup(implicit ec: ExecutionContext): Future[Unit] =
+ dbFind(Some("all"), Nil)(index => search(index).matchQuery("relations", attachmentModel.modelName).fetchSource(false))
+ ._1
+ .mapConcat(o => (o \ "_id").asOpt[String].toList)
+ .collect { case id if id.endsWith("_0") => id.dropRight(2) }
+ .mapAsync(1) { id =>
+ attachmentUseCount(id).map(id -> _)
+ }
+ .mapAsync(1) {
+ case (id, 0L) => delete(id)
+ case _ => Future.successful(())
+ }
+ .runWith(Sink.ignore)
+ .map(_ => ())
+}
diff --git a/elastic4play/app/org/elastic4play/services/AuxSrv.scala b/elastic4play/app/org/elastic4play/services/AuxSrv.scala
new file mode 100644
index 000000000..fd84243fd
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/AuxSrv.scala
@@ -0,0 +1,89 @@
+package org.elastic4play.services
+
+import akka.stream.Materializer
+import akka.stream.scaladsl.{Sink, Source}
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.InternalError
+import org.elastic4play.models.{AttributeOption, BaseEntity, ChildModelDef}
+import play.api.Logger
+import play.api.libs.json.JsObject
+
+import scala.concurrent.{ExecutionContext, Future}
+
+@Singleton
+class AuxSrv @Inject() (findSrv: FindSrv, modelSrv: ModelSrv, implicit val mat: Materializer) {
+
+ import org.elastic4play.services.QueryDSL._
+
+ private[AuxSrv] lazy val logger = Logger(getClass)
+
+ def filterAttributes(entity: BaseEntity, filter: Seq[AttributeOption.Type] => Boolean): JsObject =
+ entity.model.attributes.foldLeft(entity.toJson) {
+ case (json, attribute) if !filter(attribute.options) => json - attribute.attributeName
+ case (json, _) => json
+ }
+
+ def apply(entity: BaseEntity, nparent: Int, withStats: Boolean, removeUnaudited: Boolean)(implicit ec: ExecutionContext): Future[JsObject] =
+ apply(entity, nparent, withStats, opts => !removeUnaudited || !opts.contains(AttributeOption.unaudited))
+
+ def apply(entity: BaseEntity, nparent: Int, withStats: Boolean, filter: Seq[AttributeOption.Type] => Boolean)(
+ implicit ec: ExecutionContext
+ ): Future[JsObject] = {
+ val entityWithParent = entity.model match {
+ case childModel: ChildModelDef[_, _, _, _] if nparent > 0 =>
+ val (src, _) = findSrv(
+ childModel.parentModel,
+ "_id" ~= entity.parentId.getOrElse(throw InternalError(s"Child entity $entity has no parent ID")),
+ Some("0-1"),
+ Nil
+ )
+ src
+ .mapAsync(1) { parent =>
+ apply(parent, nparent - 1, withStats, filter).map { parent =>
+ val entityObj = filterAttributes(entity, filter)
+ entityObj + (childModel.parentModel.modelName -> parent)
+ }
+ }
+ .runWith(Sink.headOption)
+ .map(_.getOrElse {
+ logger.warn(s"Child entity (${childModel.modelName} ${entity.id}) has no parent !")
+ JsObject.empty
+ })
+ case _ => Future.successful(filterAttributes(entity, filter))
+ }
+ if (withStats) {
+ for {
+ e <- entityWithParent
+ s <- entity.model.getStats(entity)
+ } yield e + ("stats" -> s)
+ } else entityWithParent
+ }
+
+ def apply[A](entities: Source[BaseEntity, A], nparent: Int, withStats: Boolean, removeUnaudited: Boolean)(
+ implicit ec: ExecutionContext
+ ): Source[JsObject, A] =
+ entities.mapAsync(5) { entity =>
+ apply(entity, nparent, withStats, removeUnaudited)
+ }
+
+ def apply(modelName: String, entityId: String, nparent: Int, withStats: Boolean, removeUnaudited: Boolean)(
+ implicit ec: ExecutionContext
+ ): Future[JsObject] = {
+ if (entityId == "")
+ return Future.successful(JsObject.empty)
+ modelSrv(modelName)
+ .map { model =>
+ val (src, _) = findSrv(model, "_id" ~= entityId, Some("0-1"), Nil)
+ src
+ .mapAsync(1) { entity =>
+ apply(entity, nparent, withStats, removeUnaudited)
+ }
+ .runWith(Sink.headOption)
+ .map(_.getOrElse {
+ logger.warn(s"Entity $modelName $entityId not found")
+ JsObject.empty
+ })
+ }
+ .getOrElse(Future.failed(InternalError(s"Model $modelName not found")))
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/services/CreateSrv.scala b/elastic4play/app/org/elastic4play/services/CreateSrv.scala
new file mode 100644
index 000000000..bcbe7f883
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/CreateSrv.scala
@@ -0,0 +1,117 @@
+package org.elastic4play.services
+
+import java.util.Date
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Try
+
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json.{JsNull, JsObject, JsValue, Json}
+
+import javax.inject.{Inject, Singleton}
+import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable
+import org.scalactic.Every.everyToGenTraversableOnce
+import org.scalactic.{Bad, One}
+
+import org.elastic4play.JsonFormat.dateFormat
+import org.elastic4play.controllers.Fields
+import org.elastic4play.database.DBCreate
+import org.elastic4play.models._
+import org.elastic4play.utils.{RichFuture, RichOr}
+import org.elastic4play.{AttributeCheckingError, UnknownAttributeError}
+
+@Singleton
+class CreateSrv @Inject() (
+ fieldsSrv: FieldsSrv,
+ dbCreate: DBCreate,
+ eventSrv: EventSrv,
+ attachmentSrv: AttachmentSrv
+) {
+
+ /**
+ * Check if entity attributes are valid. Format is not checked as it has been already checked.
+ */
+ private[services] def checkAttributes(attrs: JsObject, model: BaseModelDef) =
+ (attrs.keys ++ model.modelAttributes.keySet)
+ .map { name =>
+ (name, (attrs \ name).asOpt[JsValue], model.modelAttributes.get(name))
+ }
+ .validatedBy {
+ case (name, value, Some(attr)) => attr.validateForCreation(value).map(name -> _)
+ case (name, maybeValue, _) => Bad(One(UnknownAttributeError(name, maybeValue.getOrElse(JsNull))))
+ }
+ .map(_.collect {
+ case (name, Some(value)) => name -> value
+ })
+ .fold(attrs => Future.successful(JsObject(attrs.toSeq)), errors => Future.failed(AttributeCheckingError(model.modelName, errors)))
+
+ private[services] def processAttributes(model: BaseModelDef, parent: Option[BaseEntity], attributes: JsObject)(
+ implicit authContext: AuthContext,
+ ec: ExecutionContext
+ ): Future[JsObject] =
+ for {
+ attributesAfterHook <- model.creationHook(parent, addMetaFields(attributes))
+ checkedAttributes <- checkAttributes(attributesAfterHook, model)
+ attributesWithAttachment <- attachmentSrv(model)(checkedAttributes)
+ } yield attributesWithAttachment
+
+ private[services] def addMetaFields(attrs: JsObject)(implicit authContext: AuthContext): JsObject =
+ attrs ++
+ Json.obj("createdBy" -> authContext.userId, "createdAt" -> Json.toJson(new Date))
+
+ private[services] def removeMetaFields(attrs: JsObject): JsObject = attrs - "createdBy" - "createdAt"
+
+ def apply[M <: ModelDef[M, E], E <: EntityDef[M, E]](model: M, fields: Fields)(implicit authContext: AuthContext, ec: ExecutionContext): Future[E] =
+ for {
+ entityAttr <- create(model, None, fields) //dbCreate(model.name, None, attributesWithAttachment)
+ entity = model(entityAttr)
+ _ = eventSrv.publish(AuditOperation(entity, AuditableAction.Creation, removeMetaFields(entityAttr), authContext))
+ } yield entity
+
+ def apply[M <: ModelDef[M, E], E <: EntityDef[M, E]](
+ model: M,
+ fieldSet: Seq[Fields]
+ )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Seq[Try[E]]] =
+ Future.sequence(fieldSet.map { fields =>
+ create(model, None, fields).map { attr =>
+ val entity = model(attr)
+ eventSrv.publish(AuditOperation(entity, AuditableAction.Creation, removeMetaFields(attr), authContext))
+ entity
+ }.toTry
+ })
+
+ def apply[M <: ChildModelDef[M, E, _, PE], E <: EntityDef[M, E], PE <: BaseEntity](model: M, parent: PE, fields: Fields)(
+ implicit authContext: AuthContext,
+ ec: ExecutionContext
+ ): Future[E] =
+ for {
+ entityAttr <- create(model, Some(parent), fields)
+ entity = model(entityAttr)
+ _ = eventSrv.publish(AuditOperation(entity, AuditableAction.Creation, removeMetaFields(entityAttr), authContext))
+ } yield entity
+
+ def apply[M <: ChildModelDef[M, E, _, PE], E <: EntityDef[M, E], PE <: BaseEntity](model: M, fieldSet: Seq[(PE, Fields)])(
+ implicit authContext: AuthContext,
+ ec: ExecutionContext
+ ): Future[Seq[Try[E]]] =
+ Future.sequence(fieldSet.map {
+ case (parent, fields) =>
+ create(model, Some(parent), fields).map { attr =>
+ val entity = model(attr)
+ eventSrv.publish(AuditOperation(entity, AuditableAction.Creation, removeMetaFields(attr), authContext))
+ entity
+ }.toTry
+
+ })
+
+ private[services] def create(model: BaseModelDef, parent: Option[BaseEntity], fields: Fields)(
+ implicit authContext: AuthContext,
+ ec: ExecutionContext
+ ): Future[JsObject] =
+ for {
+ attrs <- fieldsSrv.parse(fields, model).toFuture
+ attributesWithAttachment <- processAttributes(model, parent, attrs)
+ entityAttr <- dbCreate(model.modelName, parent, attributesWithAttachment)
+ } yield entityAttr
+}
diff --git a/elastic4play/app/org/elastic4play/services/DBList.scala b/elastic4play/app/org/elastic4play/services/DBList.scala
new file mode 100644
index 000000000..6c39ed32c
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/DBList.scala
@@ -0,0 +1,141 @@
+package org.elastic4play.services
+
+import javax.inject.{Inject, Provider, Singleton}
+
+import scala.collection.immutable
+import scala.concurrent.duration.DurationInt
+import scala.concurrent.{ExecutionContext, Future}
+
+import play.api.Configuration
+import play.api.cache.AsyncCacheApi
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json._
+
+import akka.NotUsed
+import akka.stream.Materializer
+import akka.stream.scaladsl.{Sink, Source}
+
+import org.elastic4play.database.DBCreate
+import org.elastic4play.models.{Attribute, EntityDef, ModelDef, AttributeFormat => F}
+import org.elastic4play.utils.{Hasher, RichFuture}
+
+@Singleton
+class DBListModel(dblistName: String) extends ModelDef[DBListModel, DBListItemEntity](dblistName, "DBList", "/list") {
+ model =>
+ @Inject def this(configuration: Configuration) = this(configuration.get[String]("dblist.name"))
+
+ val value: Attribute[String] = attribute("value", F.stringFmt, "Content of the dblist item")
+ val dblist: Attribute[String] = attribute("dblist", F.stringFmt, "Name of the dblist")
+
+ override def apply(attributes: JsObject) = new DBListItemEntity(this, attributes)
+
+}
+
+class DBListItemEntity(model: DBListModel, attributes: JsObject) extends EntityDef[DBListModel, DBListItemEntity](model, attributes) with DBListItem {
+ def mapTo[T](implicit reads: Reads[T]): T = Json.parse((attributes \ "value").as[String]).as[T]
+
+ def dblist: String = (attributes \ "dblist").as[String]
+
+ override def toJson: JsObject = super.toJson - "value" + ("value" -> mapTo[JsValue])
+}
+
+trait DBListItem {
+ def id: String
+
+ def dblist: String
+
+ def mapTo[A](implicit reads: Reads[A]): A
+}
+
+trait DBList {
+ def cachedItems: Seq[DBListItem]
+
+ def getItems(): (Source[DBListItem, NotUsed], Future[Long])
+
+ def getItems[A: Reads]: (Source[(String, A), NotUsed], Future[Long])
+
+ def addItem[A: Writes](item: A): Future[DBListItem]
+
+ def exists(key: String, value: JsValue): Future[Boolean]
+}
+
+@Singleton
+class DBLists @Inject() (
+ getSrv: GetSrv,
+ findSrv: FindSrv,
+ deleteSrv: Provider[DeleteSrv],
+ dbCreate: DBCreate,
+ dblistModel: DBListModel,
+ cache: AsyncCacheApi,
+ implicit val ec: ExecutionContext,
+ implicit val mat: Materializer
+) {
+
+ /**
+ * Returns list of all dblist name
+ */
+ def listAll: Future[collection.Set[String]] = {
+ import org.elastic4play.services.QueryDSL._
+ findSrv(dblistModel, any, groupByField("dblist", selectCount)).map(_.keys)
+ }
+
+ def deleteItem(itemId: String)(implicit authContext: AuthContext): Future[Unit] =
+ getItem(itemId).flatMap(deleteItem)
+
+ def deleteItem(item: DBListItemEntity)(implicit authContext: AuthContext): Future[Unit] =
+ for {
+ _ <- deleteSrv.get.realDelete(item)
+ _ = cache.remove(dblistModel.modelName + "_" + item.dblist)
+ } yield ()
+
+ def getItem(itemId: String): Future[DBListItemEntity] = getSrv[DBListModel, DBListItemEntity](dblistModel, itemId)
+
+ def apply(name: String): DBList = new DBList {
+
+ def cachedItems: immutable.Seq[DBListItem] =
+ cache
+ .getOrElseUpdate(dblistModel.modelName + "_" + name, 10.seconds) {
+ val (src, _) = getItems()
+ src.runWith(Sink.seq)
+ }
+ .await
+
+ def getItems(): (Source[DBListItem, NotUsed], Future[Long]) = {
+ import org.elastic4play.services.QueryDSL._
+ findSrv[DBListModel, DBListItemEntity](dblistModel, "dblist" ~= name, Some("all"), Nil)
+ }
+
+ override def getItems[A: Reads]: (Source[(String, A), NotUsed], Future[Long]) = {
+ val (src, total) = getItems()
+ val items = src.map(item => (item.id, item.mapTo[A]))
+ (items, total)
+ }
+
+ override def addItem[A: Writes](item: A): Future[DBListItem] = {
+ val value = Json.toJson(item)
+ val id = Hasher("MD5").fromString(value.toString).head.toString
+ dbCreate(dblistModel.modelName, None, Json.obj("_id" -> id, "dblist" -> name, "value" -> JsString(value.toString)))
+ .map { newItem =>
+ cache.remove(dblistModel.modelName + "_" + name)
+ dblistModel(newItem)
+ }
+ }
+
+ def exists(key: String, value: JsValue): Future[Boolean] =
+ getItems()
+ ._1
+ .filter { item =>
+ item
+ .mapTo[JsValue]
+ .asOpt[JsObject]
+ .flatMap { obj =>
+ (obj \ key).asOpt[JsValue]
+ }
+ .contains(value)
+ }
+ .runWith(Sink.headOption)
+ .map(_.isDefined)
+ }
+
+}
diff --git a/elastic4play/app/org/elastic4play/services/DeleteSrv.scala b/elastic4play/app/org/elastic4play/services/DeleteSrv.scala
new file mode 100644
index 000000000..e93cec13b
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/DeleteSrv.scala
@@ -0,0 +1,40 @@
+package org.elastic4play.services
+
+import javax.inject.{Inject, Singleton}
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Success
+
+import play.api.libs.json.JsObject
+
+import org.elastic4play.NotFoundError
+import org.elastic4play.database.{DBRemove, ModifyConfig}
+import org.elastic4play.models.{AbstractModelDef, BaseEntity, EntityDef}
+
+@Singleton
+class DeleteSrv @Inject() (updateSrv: UpdateSrv, getSrv: GetSrv, dbremove: DBRemove, eventSrv: EventSrv) {
+
+ def apply[M <: AbstractModelDef[M, E], E <: EntityDef[M, E]](
+ model: M,
+ id: String
+ )(implicit authContext: AuthContext, ec: ExecutionContext): Future[E] =
+ getSrv[M, E](model, id).flatMap(entity => apply(entity))
+
+ def apply[E <: BaseEntity](entity: E)(implicit authContext: AuthContext, ec: ExecutionContext): Future[E] =
+ updateSrv
+ .doUpdate(entity, entity.model.removeAttribute, ModifyConfig.default)
+ .andThen {
+ case Success(newEntity) => eventSrv.publish(AuditOperation(newEntity, AuditableAction.Delete, JsObject.empty, authContext))
+ }
+
+ def realDelete[M <: AbstractModelDef[M, E], E <: EntityDef[M, E]](
+ model: M,
+ id: String
+ )(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] =
+ getSrv[M, E](model, id).flatMap(entity => realDelete(entity))
+
+  def realDelete[E <: BaseEntity](entity: E)(implicit authContext: AuthContext, ec: ExecutionContext): Future[Unit] =
+    dbremove(entity).map { isFound =>
+      if (isFound) eventSrv.publish(AuditOperation(entity, AuditableAction.Delete, entity.toJson, authContext))
+      // Fixed interpolation: was s"$entity.model.modelName} ..." which rendered entity.toString + literal ".model.modelName}"
+      else throw NotFoundError(s"${entity.model.modelName} ${entity.id} not found")
+    }
+}
diff --git a/elastic4play/app/org/elastic4play/services/EventSrv.scala b/elastic4play/app/org/elastic4play/services/EventSrv.scala
new file mode 100644
index 000000000..8291d6d90
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/EventSrv.scala
@@ -0,0 +1,56 @@
+package org.elastic4play.services
+
+import akka.actor.ActorRef
+import akka.event.{ActorEventBus, SubchannelClassification}
+import akka.stream.Materializer
+import akka.util.Subclassification
+import org.elastic4play.models.{BaseEntity, HiveEnumeration}
+import play.api.Logger
+import play.api.libs.json.JsObject
+import play.api.mvc.{Filter, RequestHeader, Result}
+
+import java.util.Date
+import javax.inject.{Inject, Singleton}
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Try
+
+trait EventMessage
+
+object AuditableAction extends Enumeration with HiveEnumeration {
+ type Type = Value
+ val Update, Creation, Delete, Get = Value
+}
+
+case class RequestProcessStart(request: RequestHeader) extends EventMessage
+case class RequestProcessEnd(request: RequestHeader, result: Try[Result]) extends EventMessage
+case class InternalRequestProcessStart(requestId: String) extends EventMessage
+case class InternalRequestProcessEnd(requestId: String) extends EventMessage
+
+case class AuditOperation(entity: BaseEntity, action: AuditableAction.Type, details: JsObject, authContext: AuthContext, date: Date = new Date())
+ extends EventMessage
+
+@Singleton
+class EventFilter @Inject() (eventSrv: EventSrv, implicit val mat: Materializer, implicit val ec: ExecutionContext) extends Filter {
+
+ override def apply(nextFilter: RequestHeader => Future[Result])(requestHeader: RequestHeader): Future[Result] = {
+ eventSrv.publish(RequestProcessStart(requestHeader))
+ nextFilter(requestHeader).andThen {
+ case result => eventSrv.publish(RequestProcessEnd(requestHeader, result))
+ }
+ }
+}
+
+@Singleton
+class EventSrv extends ActorEventBus with SubchannelClassification {
+ private[EventSrv] lazy val logger = Logger(getClass)
+ override type Classifier = Class[_ <: EventMessage]
+ override type Event = EventMessage
+
+ override protected def classify(event: EventMessage): Classifier = event.getClass
+ override protected def publish(event: EventMessage, subscriber: ActorRef): Unit = subscriber ! event
+
+ implicit protected def subclassification: Subclassification[Classifier] = new Subclassification[Classifier] {
+ def isEqual(x: Classifier, y: Classifier): Boolean = x == y
+ def isSubclass(x: Classifier, y: Classifier): Boolean = y.isAssignableFrom(x)
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/services/ExecutionContextSrv.scala b/elastic4play/app/org/elastic4play/services/ExecutionContextSrv.scala
new file mode 100644
index 000000000..43f9cab3e
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/ExecutionContextSrv.scala
@@ -0,0 +1,35 @@
+package org.elastic4play.services
+
+import akka.actor.ActorSystem
+import javax.inject.{Inject, Singleton}
+import play.api.Logger
+import play.api.cache.SyncCacheApi
+
+import scala.concurrent.ExecutionContext
+import scala.util.Try
+
+@Singleton
+class ExecutionContextSrv @Inject() (system: ActorSystem, syncCacheApi: SyncCacheApi) {
+ lazy val logger: Logger = Logger(getClass)
+ val default: ExecutionContext = system.dispatcher
+
+ def get(threadPoolName: String): ExecutionContext =
+ syncCacheApi.getOrElseUpdate(s"threadPool-$threadPoolName") {
+ Try(system.dispatchers.lookup(threadPoolName)).getOrElse {
+ logger.warn(s"""The configuration of thread pool $threadPoolName is not found. Fallback to default thread pool.
+ |In order to use a dedicated thread pool, add the following configuration in application.conf:
+ | $threadPoolName {
+ | fork-join-executor {
+ | # Number of threads = min(parallelism-max, max(parallelism-min, ceil(available processors * parallelism-factor)))
+ | parallelism-min = 1
+ | parallelism-factor = 2.0
+ | parallelism-max = 4
+ | }
+ | }
+ |""".stripMargin)
+ default
+ }
+ }
+ def withCustom[A](threadPoolName: String)(body: ExecutionContext => A): A = body(get(threadPoolName))
+ def withDefault[A](body: ExecutionContext => A): A = body(default)
+}
diff --git a/elastic4play/app/org/elastic4play/services/FieldsSrv.scala b/elastic4play/app/org/elastic4play/services/FieldsSrv.scala
new file mode 100644
index 000000000..79026e72f
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/FieldsSrv.scala
@@ -0,0 +1,34 @@
+package org.elastic4play.services
+
+import play.api.Logger
+import play.api.libs.json._
+
+import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable
+import org.scalactic._
+
+import org.elastic4play.controllers.Fields
+import org.elastic4play.controllers.JsonFormat.inputValueFormat
+import org.elastic4play.models.BaseModelDef
+import org.elastic4play.{AttributeCheckingError, UnknownAttributeError}
+
+class FieldsSrv {
+ private[FieldsSrv] lazy val logger = Logger(getClass)
+
+ def parse(fields: Fields, model: BaseModelDef): JsObject Or AttributeCheckingError =
+ fields
+ .map {
+ case (name, value) =>
+ val names = name.split("\\.").toSeq
+ (name, names, value, model.formAttributes.get(names.head))
+ }
+ .validatedBy {
+ case (name, _, value, Some(_)) if value.jsonValue == JsNull || value.jsonValue == JsArray(Nil) => Good(name -> value.jsonValue)
+ case (name, names, value, Some(attr)) =>
+ attr
+ .format
+ .inputValueToJson(names.tail, value)
+ .transform(v => Good(name -> v), es => Bad(es.map(e => e.withName(model.modelName + "." + name))))
+ case (_, names, value, None) => Bad(One(UnknownAttributeError(model.modelName + "." + names.mkString("."), Json.toJson(value))))
+ }
+ .transform(attrs => Good(JsObject(attrs.toSeq)), errors => Bad(AttributeCheckingError(model.modelName, errors.toSeq)))
+}
diff --git a/elastic4play/app/org/elastic4play/services/FindSrv.scala b/elastic4play/app/org/elastic4play/services/FindSrv.scala
new file mode 100644
index 000000000..dbd0e4532
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/FindSrv.scala
@@ -0,0 +1,72 @@
+package org.elastic4play.services
+
+import akka.NotUsed
+import akka.stream.scaladsl.Source
+import com.sksamuel.elastic4s.ElasticDsl._
+import com.sksamuel.elastic4s.requests.searches.queries.Query
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.database.{DBConfiguration, DBFind}
+import org.elastic4play.models.{AbstractModelDef, BaseEntity, BaseModelDef}
+import org.elastic4play.services.QueryDSL._
+import play.api.libs.json.JsObject
+import play.api.libs.json.JsValue.jsValueToJsLookup
+
+import scala.concurrent.{ExecutionContext, Future}
+
+case class QueryDef(query: Query)
+
+@Singleton
+class FindSrv @Inject() (dbfind: DBFind, modelSrv: ModelSrv) {
+
+ def switchTo(db: DBConfiguration) = new FindSrv(dbfind.switchTo(db), modelSrv)
+
+  def apply(
+      modelName: Option[String],
+      queryDef: QueryDef,
+      range: Option[String],
+      sortBy: Seq[String]
+  )(implicit ec: ExecutionContext): (Source[BaseEntity, NotUsed], Future[Long]) = {
+    val query = modelName.fold(queryDef)(m => and("relations" ~= m, queryDef)).query
+    val (src, total) = dbfind(range, sortBy)(indexName => search(indexName).query(query))
+    val entities = src.map { attrs =>
+      modelName match {
+        //case Some("audit") => auditModel.get()(attrs)
+        case Some(m) => modelSrv(m).getOrElse(sys.error(s"Model $m not found"))(attrs)
+        case None =>
+          val tpe   = (attrs \ "_type").asOpt[String].getOrElse(sys.error("Entity has no _type field"))
+          val model = modelSrv(tpe).getOrElse(sys.error(s"Model $tpe not found"))
+          model(attrs)
+      }
+    }
+    (entities, total)
+  }
+
+ def apply(model: BaseModelDef, queryDef: QueryDef, range: Option[String], sortBy: Seq[String])(
+ implicit ec: ExecutionContext
+ ): (Source[BaseEntity, NotUsed], Future[Long]) = {
+ val (src, total) = dbfind(range, sortBy)(indexName => search(indexName).query(and("relations" ~= model.modelName, queryDef).query))
+ val entities = src.map(attrs => model(attrs))
+ (entities, total)
+ }
+
+ def apply[M <: AbstractModelDef[M, E], E <: BaseEntity](
+ model: M,
+ queryDef: QueryDef,
+ range: Option[String],
+ sortBy: Seq[String]
+ )(implicit ec: ExecutionContext): (Source[E, NotUsed], Future[Long]) = {
+ val (src, total) = dbfind(range, sortBy)(indexName => search(indexName).query(and("relations" ~= model.modelName, queryDef).query))
+ val entities = src.map(attrs => model(attrs))
+ (entities, total)
+ }
+
+ def apply(model: BaseModelDef, queryDef: QueryDef, aggs: Agg*)(implicit ec: ExecutionContext): Future[JsObject] =
+ dbfind(indexName =>
+ search(indexName).query(and("relations" ~= model.modelName, queryDef).query).aggregations(aggs.flatMap(_.apply(model))).size(0)
+ ).map { searchResponse =>
+ aggs
+ .map(_.processResult(model, searchResponse.aggregations))
+ .reduceOption(_ ++ _)
+ .getOrElse(JsObject.empty)
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/services/GetSrv.scala b/elastic4play/app/org/elastic4play/services/GetSrv.scala
new file mode 100644
index 000000000..3c90a4161
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/GetSrv.scala
@@ -0,0 +1,15 @@
+package org.elastic4play.services
+
+import javax.inject.{Inject, Singleton}
+
+import scala.concurrent.{ExecutionContext, Future}
+
+import org.elastic4play.database.DBGet
+import org.elastic4play.models.{AbstractModelDef, EntityDef}
+
+@Singleton
+class GetSrv @Inject() (dbGet: DBGet) {
+
+ def apply[M <: AbstractModelDef[M, E], E <: EntityDef[M, E]](model: M, id: String)(implicit ec: ExecutionContext): Future[E] =
+ dbGet(model.modelName, id).map(attrs => model(attrs))
+}
diff --git a/elastic4play/app/org/elastic4play/services/JsonFormat.scala b/elastic4play/app/org/elastic4play/services/JsonFormat.scala
new file mode 100644
index 000000000..1331c5e05
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/JsonFormat.scala
@@ -0,0 +1,216 @@
+package org.elastic4play.services
+
+import scala.jdk.CollectionConverters._
+
+import play.api.libs.json._
+import play.api.{Configuration, Logger}
+
+import com.typesafe.config.ConfigValueType._
+import com.typesafe.config.{ConfigList, ConfigObject, ConfigValue}
+
+import org.elastic4play.models.JsonFormat._
+import org.elastic4play.services.QueryDSL._
+import org.elastic4play.utils.Hash
+import org.elastic4play.utils.JsonFormat.hashFormat
+
+object JsonFormat {
+ private[JsonFormat] lazy val logger = Logger(getClass)
+
+ private val attachmentWrites: OWrites[Attachment] = OWrites[Attachment] { attachment =>
+ Json.obj(
+ "name" -> attachment.name,
+ "hashes" -> attachment.hashes,
+ "size" -> attachment.size,
+ "contentType" -> attachment.contentType,
+ "id" -> attachment.id
+ )
+ }
+
+ private val attachmentReads: Reads[Attachment] = Reads { json =>
+ for {
+ name <- (json \ "name").validate[String]
+ hashes <- (json \ "hashes").validate[Seq[Hash]]
+ size <- (json \ "size").validate[Long]
+ contentType <- (json \ "contentType").validate[String]
+ id <- (json \ "id").validate[String]
+ } yield Attachment(name, hashes, size, contentType, id)
+ }
+
+ implicit val attachmentFormat: OFormat[Attachment] = OFormat(attachmentReads, attachmentWrites)
+
+ implicit val roleWrites: Writes[Role] = Writes[Role](role => JsString(role.name))
+
+ implicit def configWrites: OWrites[Configuration] = OWrites[Configuration] { cfg =>
+ JsObject(cfg.subKeys.map(key => key -> configValueWrites.writes(cfg.underlying.getValue(key))).toSeq)
+ }
+
+ implicit def configValueWrites: Writes[ConfigValue] = Writes[ConfigValue] {
+ case v: ConfigObject => configWrites.writes(Configuration(v.toConfig))
+ case v: ConfigList => JsArray(v.asScala.map(x => configValueWrites.writes(x)))
+ case v if v.valueType == NUMBER => JsNumber(BigDecimal(v.unwrapped.asInstanceOf[Number].toString))
+ case v if v.valueType == BOOLEAN => JsBoolean(v.unwrapped.asInstanceOf[Boolean])
+ case v if v.valueType == NULL => JsNull
+ case v if v.valueType == STRING => JsString(v.unwrapped.asInstanceOf[String])
+ }
+
+ //def jsonGet[A](json: JsValue, name: String)(implicit reads: Reads[A]) = (json \ name).as[A]
+
+ object JsObj {
+
+ def unapply(v: JsValue): Option[Seq[(String, JsValue)]] = v match {
+ case JsObject(f) => Some(f.toSeq)
+ case _ => None
+ }
+ }
+
+ object JsObjOne {
+
+ def unapply(v: JsValue): Option[(String, JsValue)] = v match {
+ case JsObject(f) if f.size == 1 => f.toSeq.headOption
+ case _ => None
+ }
+ }
+
+ object JsVal {
+
+ def unapply(v: JsValue): Option[Any] = v match {
+ case JsString(s) => Some(s)
+ case JsBoolean(b) => Some(b)
+ case JsNumber(i) => Some(i)
+ case _ => None
+ }
+ }
+
+ object JsRange {
+
+ def unapply(v: JsValue): Option[(String, Any, Any)] =
+ for {
+ field <- (v \ "_field").asOpt[String]
+ jsFrom <- (v \ "_from").asOpt[JsValue]
+ from <- JsVal.unapply(jsFrom)
+ jsTo <- (v \ "_to").asOpt[JsValue]
+ to <- JsVal.unapply(jsTo)
+ } yield (field, from, to)
+ }
+
+ object JsParent {
+
+ def unapply(v: JsValue): Option[(String, QueryDef)] =
+ for {
+ t <- (v \ "_type").asOpt[String]
+ q <- (v \ "_query").asOpt[QueryDef]
+ } yield (t, q)
+ }
+
+ object JsParentId {
+
+ def unapply(v: JsValue): Option[(String, String)] =
+ for {
+ t <- (v \ "_type").asOpt[String]
+ i <- (v \ "_id").asOpt[String]
+ } yield (t, i)
+ }
+
+ object JsField {
+
+ def unapply(v: JsValue): Option[(String, Any)] =
+ for {
+ f <- (v \ "_field").asOpt[String]
+ maybeValue <- (v \ "_value").asOpt[JsValue]
+ value <- JsVal.unapply(maybeValue)
+ } yield (f, value)
+ }
+
+ object JsFieldIn {
+
+ def unapply(v: JsValue): Option[(String, Seq[String])] =
+ for {
+ f <- (v \ "_field").asOpt[String]
+ jsValues <- (v \ "_values").asOpt[Seq[JsValue]]
+ values = jsValues.flatMap(JsVal.unapply)
+ } yield f -> values.map(_.toString)
+ }
+
+ object JsAgg {
+
+ def unapply(v: JsValue): Option[(String, Option[String], JsValue)] =
+ for {
+ agg <- (v \ "_agg").asOpt[String]
+ aggName = (v \ "_name").asOpt[String]
+ } yield (agg, aggName, v)
+ }
+
+ object JsAggFieldQuery {
+
+ def unapply(v: JsValue): Option[(String, Option[QueryDef])] =
+ for {
+ field <- (v \ "_field").asOpt[String]
+ query = (v \ "_query").asOpt[QueryDef]
+ } yield (field, query)
+ }
+
+ implicit val queryReads: Reads[QueryDef] = {
+ Reads {
+ case JsObjOne(("_and", JsArray(v))) => JsSuccess(and(v.map(_.as[QueryDef](queryReads)).toSeq: _*))
+ case JsObjOne(("_or", JsArray(v))) => JsSuccess(or(v.map(_.as[QueryDef](queryReads)).toSeq: _*))
+ case JsObjOne(("_contains", JsString(v))) => JsSuccess(contains(v))
+ case JsObjOne(("_not", v: JsObject)) => JsSuccess(not(v.as[QueryDef](queryReads)))
+ case JsObjOne(("_any", _)) => JsSuccess(any)
+ case j: JsObject if j.fields.isEmpty => JsSuccess(any)
+ case JsObjOne(("_gt", JsObjOne(n, JsVal(v)))) => JsSuccess(n ~> v)
+ case JsObjOne(("_gte", JsObjOne(n, JsVal(v)))) => JsSuccess(n ~>= v)
+ case JsObjOne(("_lt", JsObjOne(n, JsVal(v)))) => JsSuccess(n ~< v)
+ case JsObjOne(("_lte", JsObjOne(n, JsVal(v)))) => JsSuccess(n ~<= v)
+ case JsObjOne(("_between", JsRange(n, f, t))) => JsSuccess(n ~<> (f -> t))
+ case JsObjOne(("_parent", JsParent(p, q))) => JsSuccess(parent(p, q))
+ case JsObjOne(("_parent", JsParentId(p, i))) => JsSuccess(withParent(p, i))
+ case JsObjOne(("_id", JsString(id))) => JsSuccess(withId(id))
+ case JsField(field, value) => JsSuccess(field ~= value)
+ case JsObjOne(("_child", JsParent(p, q))) => JsSuccess(child(p, q))
+ case JsObjOne(("_string", JsString(s))) => JsSuccess(string(s))
+ case JsObjOne(("_in", JsFieldIn(f, v))) => JsSuccess(f in (v: _*))
+ case JsObjOne(("_type", JsString(v))) => JsSuccess(ofType(v))
+ case JsObjOne(("_like", JsField(field, value))) => JsSuccess(field like value)
+ case JsObjOne(("_wildcard", JsField(field, value))) => JsSuccess(field ~=~ value)
+ case JsObjOne((n, JsVal(v))) =>
+ if (n.startsWith("_")) logger.warn(s"""Potentially invalid search query : {"$n": "$v"}"""); JsSuccess(n ~= v)
+ case other => JsError(s"Invalid query: unexpected $other")
+ }
+ }
+
+ implicit val aggReads: Reads[Agg] = Reads {
+ case JsAgg("avg", aggregationName, JsAggFieldQuery(field, query)) => JsSuccess(selectAvg(aggregationName, field, query))
+ case JsAgg("min", aggregationName, JsAggFieldQuery(field, query)) => JsSuccess(selectMin(aggregationName, field, query))
+ case JsAgg("max", aggregationName, JsAggFieldQuery(field, query)) => JsSuccess(selectMax(aggregationName, field, query))
+ case JsAgg("sum", aggregationName, JsAggFieldQuery(field, query)) => JsSuccess(selectSum(aggregationName, field, query))
+ case json @ JsAgg("count", aggregationName, _) => JsSuccess(selectCount(aggregationName, (json \ "_query").asOpt[QueryDef]))
+ case json @ JsAgg("top", aggregationName, _) =>
+ val size = (json \ "_size").asOpt[Int].getOrElse(10)
+ val order = (json \ "_order").asOpt[Seq[String]].getOrElse(Nil)
+ JsSuccess(selectTop(aggregationName, size, order))
+ case json @ JsAgg("time", aggregationName, _) =>
+ val fields = (json \ "_fields").as[Seq[String]]
+ val interval = (json \ "_interval").as[String]
+ val selectables = (json \ "_select").as[Seq[Agg]]
+ JsSuccess(groupByTime(aggregationName, fields, interval, selectables: _*))
+ case json @ JsAgg("field", aggregationName, _) =>
+ val field = (json \ "_field").as[String]
+ val size = (json \ "_size").asOpt[Int].getOrElse(10)
+ val order = (json \ "_order").asOpt[Seq[String]].getOrElse(Nil)
+ val selectables = (json \ "_select").as[Seq[Agg]]
+ JsSuccess(groupByField(aggregationName, field, size, order, selectables: _*))
+ case json @ JsAgg("category", aggregationName, _) =>
+ val categories = (json \ "_categories").as[Map[String, QueryDef]]
+ val selectables = (json \ "_select").as[Seq[Agg]]
+ JsSuccess(groupByCaterogy(aggregationName, categories, selectables: _*))
+ case unexpected: JsValue => JsError(s"Unexpected JsValue $unexpected")
+ }
+
+ implicit val authContextWrites: OWrites[AuthContext] = OWrites[AuthContext] { authContext =>
+ Json.obj("id" -> authContext.userId, "name" -> authContext.userName, "roles" -> authContext.roles)
+ }
+
+ implicit val auditableActionFormat: Format[AuditableAction.Type] = enumFormat(AuditableAction)
+
+ implicit val AuditOperationWrites: OWrites[AuditOperation] = Json.writes[AuditOperation]
+}
diff --git a/elastic4play/app/org/elastic4play/services/MigrationSrv.scala b/elastic4play/app/org/elastic4play/services/MigrationSrv.scala
new file mode 100644
index 000000000..78e9d792a
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/MigrationSrv.scala
@@ -0,0 +1,307 @@
+package org.elastic4play.services
+
+import akka.NotUsed
+import akka.actor.ActorSystem
+import akka.stream.Materializer
+import akka.stream.scaladsl.{Sink, Source}
+import com.sksamuel.elastic4s.ElasticDsl._
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.InternalError
+import org.elastic4play.database._
+import play.api.Logger
+import play.api.libs.json.JsValue.jsValueToJsLookup
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json._
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success}
+
+// Progress event published while a table is migrated: `current` entities done out of `total`.
+case class MigrationEvent(modelName: String, current: Long, total: Long) extends EventMessage
+
+// Published once when the whole database migration has completed successfully.
+case object EndOfMigrationEvent extends EventMessage
+
+// Implemented by the application to supply version-specific migration steps.
+trait MigrationOperations {
+  // Operations to apply on top of a given database state; partial function:
+  // a version for which it is undefined cannot be migrated.
+  val operations: PartialFunction[DatabaseState, Seq[Operation]]
+  def beginMigration(version: Int): Future[Unit]
+  def endMigration(version: Int): Future[Unit]
+}
+
+/* DatabaseState is the state of a specific version of the database.
+ * States are linked from last present version to the desired version
+ */
+abstract class DatabaseState {
+  // Schema version this state represents.
+  def version: Int
+  // Stream of all entities stored in `tableName` at this version.
+  def source(tableName: String): Source[JsObject, NotUsed]
+  // Number of entities in `tableName` at this version.
+  def count(tableName: String): Future[Long]
+  // Single-entity lookup by id at this version.
+  def getEntity(tableName: String, id: String): Future[JsObject]
+}
+
+object DatabaseState {
+  // Extractor on the version number, used by `migration.operations` pattern matches.
+  def unapply(s: DatabaseState): Option[Int] = Some(s.version)
+}
+
+@Singleton
+class MigrationSrv @Inject() (
+    migration: MigrationOperations,
+    db: DBConfiguration,
+    dbcreate: DBCreate,
+    dbfind: DBFind,
+    dbindex: DBIndex,
+    modelSrv: ModelSrv,
+    eventSrv: EventSrv,
+    implicit val system: ActorSystem,
+    implicit val ec: ExecutionContext,
+    implicit val mat: Materializer
+) {
+
+  private[MigrationSrv] lazy val logger = Logger(getClass)
+
+  /* Constructed state of the database from the previous version */
+  class MigrationTransition(db: DBConfiguration, previousState: DatabaseState, operations: Seq[Operation]) extends DatabaseState {
+    override def version: Int = db.version
+    // Chain every operation, in declaration order, on top of the previous state's stream.
+    override def source(tableName: String): Source[JsObject, NotUsed] = operations.foldLeft(previousState.source _)((f, op) => op(f))(tableName)
+    override def count(tableName: String): Future[Long] = previousState.count(tableName)
+    // Migrate a single entity by pushing it through the same operation chain.
+    override def getEntity(tableName: String, id: String): Future[JsObject] =
+      previousState.getEntity(tableName, id).flatMap { previousValue =>
+        operations
+          .foldLeft((_: String) => Source.single(previousValue))((f, op) => op(f))(tableName)
+          .runWith(Sink.head)
+      }
+  }
+
+  /* Last version of database */
+  class OriginState(db: DBConfiguration) extends DatabaseState {
+    private val currentdbfind = dbfind.switchTo(db)
+    private lazy val currentdbget = new DBGet(db)
+    override def version: Int = db.version
+    // Entities of a table are selected through the "relations" join field.
+    override def source(tableName: String): Source[JsObject, NotUsed] =
+      currentdbfind.apply(Some("all"), Nil)(indexName => search(indexName).matchQuery("relations", tableName))._1
+    override def count(tableName: String): Future[Long] = new DBIndex(db, 0, 0, Map.empty).getSize(tableName)
+    override def getEntity(tableName: String, entityId: String): Future[JsObject] = currentdbget(tableName, entityId)
+  }
+
+  /* If there is no database, use empty one */
+  object EmptyState extends DatabaseState {
+    override def version = 1
+    override def source(tableName: String): Source[JsObject, NotUsed] = Source.empty[JsObject]
+    override def count(tableName: String): Future[Long] = Future.successful(0)
+    override def getEntity(tableName: String, id: String): Future[JsObject] = Future.failed(new Exception("TODO"))
+  }
+
+  // Walks back through previous database versions until an existing index (or version 1)
+  // is reached, stacking the declared operations for every intermediate version.
+  def migrationPath(db: DBConfiguration): Future[(Int, DatabaseState)] =
+    new DBIndex(db, 0, 0, Map.empty).getIndexStatus.flatMap {
+      case true =>
+        logger.info(s"Initiate database migration from version ${db.version}")
+        Future.successful(db.version -> new OriginState(db))
+      case false if db.version == 1 =>
+        logger.info("Create a new empty database")
+        Future.successful(0 -> EmptyState)
+      case false =>
+        migrationPath(db.previousVersion).map {
+          case (v, s) =>
+            logger.info(s"Migrate database from version $v, add operations for version ${db.version}")
+            val operations = migration
+              .operations
+              .applyOrElse(s, (_: DatabaseState) => throw InternalError(s"No operation for version ${s.version}, migration impossible"))
+            v -> new MigrationTransition(db, s, operations)
+        }
+    }
+
+  // JSON payload describing migration progress, published to event subscribers.
+  def migrationEvent(modelName: String, current: Long, total: Long): JsObject =
+    Json.obj("objectType" -> "migration", "rootId" -> "none", "tableName" -> modelName, "current" -> current, "total" -> total)
+
+  // Streams entities into the new index, publishing a MigrationEvent per entity and
+  // folding the legacy _type/_parent fields into the single "relations" join field.
+  def migrateEntities(modelName: String, entities: Source[JsObject, _], total: Long): Future[Unit] = {
+    val count = Source.fromIterator(() => Iterator.from(1))
+    val r = entities
+      .zipWith(count) { (entity, current) =>
+        eventSrv.publish(MigrationEvent(modelName, current.toLong, total))
+        (entity \ "_type").asOpt[JsString].fold(entity) { t =>
+          val relations = (entity \ "_parent").asOpt[JsString].fold[JsValue](t)(p => Json.obj("name" -> t, "parent" -> p))
+          entity - "_type" - "_parent" + ("relations" -> relations)
+        }
+      }
+      .runWith(dbcreate.sink())
+    r.onComplete { x =>
+      // Was a bare println (debug leftover); route through the logger instead.
+      logger.debug(s"migrateEntity($modelName) has finished : $x")
+    }
+    r
+  }
+
+  def migrateTable(mig: DatabaseState, table: String): Future[Unit] =
+    mig
+      .count(table)
+      .flatMap { total =>
+        logger.info(s"Migrating $total entities from $table")
+        migrateEntities(table, mig.source(table), total)
+      }
+
+  // Completed future means "no migration running"; replaced when a migration starts.
+  private var migrationProcess = Future.successful(())
+
+  // Creates the new index and migrates every table. No-op (returns the running/last
+  // future) when the index already exists or a migration is in progress.
+  def migrate: Future[Unit] = {
+    if (!dbindex.indexStatus && migrationProcess.isCompleted) {
+      val models = modelSrv.list
+      migrationProcess = migrationPath(db)
+        .flatMap { mig =>
+          dbindex.createIndex(new SequenceModel +: models).map(_ => mig)
+        }
+        .flatMap { versionMig =>
+          migration.beginMigration(versionMig._1).map(_ => versionMig)
+        }
+        // for all tables, get entities from migrationPath and insert in current database
+        .flatMap {
+          case (version, mig) =>
+            Future
+              .sequence(
+                ("sequence" +: models.map(_.modelName).sorted)
+                  .distinct
+                  .map(t =>
+                    // A failed table is logged but does not abort the other tables.
+                    migrateTable(mig, t).recover {
+                      case error => logger.error(s"Migration of table $t failed :", error)
+                    }
+                  )
+              )
+              .flatMap(_ => migration.endMigration(version))
+        }
+      migrationProcess.onComplete {
+        case Success(_) =>
+          logger.info("End of migration")
+          eventSrv.publish(EndOfMigrationEvent)
+        case Failure(t) =>
+          // The failure already propagates through migrationProcess itself; the previous
+          // dead `Future.failed(t)` (its value was discarded) has been removed.
+          logger.error("Migration fail", t)
+      }
+    }
+    migrationProcess
+  }
+
+  def isMigrating: Boolean = !migrationProcess.isCompleted
+  def isReady: Boolean = dbindex.indexStatus && migrationProcess.isCompleted
+}
+
+/* Operation applied to the previous state of the database to get next version */
+// An Operation rewrites the "table name -> entity stream" function of the previous state.
+trait Operation extends ((String => Source[JsObject, NotUsed]) => (String => Source[JsObject, NotUsed]))
+
+object Operation {
+
+  // Lifts a plain transformation of the "table name -> entity stream" function into an Operation.
+  def apply(o: (String => Source[JsObject, NotUsed]) => String => Source[JsObject, NotUsed]): Operation =
+    (f: String => Source[JsObject, NotUsed]) => o(f)
+
+  // Renames an entity type; also rewrites matching "objectType" references in the audit table.
+  def renameEntity(previous: String, next: String): Operation =
+    Operation((f: String => Source[JsObject, NotUsed]) => {
+      case `next` => f(previous).map(_ + ("_type" -> JsString(next)))
+      case "audit" =>
+        f("audit").map { x =>
+          (x \ "objectType").asOpt[String] match {
+            case Some(`previous`) => x - "objectType" + ("objectType" -> JsString(next))
+            case _ => x
+          }
+        }
+      case other => f(other)
+    })
+
+  // Applies `transform` to every entity of the tables accepted by `tableFilter`.
+  def mapEntity(tableFilter: String => Boolean, transform: JsObject => JsObject): Operation =
+    Operation((f: String => Source[JsObject, NotUsed]) => {
+      case table if tableFilter(table) => f(table).map(transform)
+      case other => f(other)
+    })
+
+  def mapEntity(tables: String*)(transform: JsObject => JsObject): Operation = mapEntity(tables.contains, transform)
+
+  def apply(table: String)(transform: JsObject => JsObject): Operation = mapEntity(_ == table, transform)
+
+  // Drops entities matching `filter` from the selected tables.
+  def removeEntity(tableFilter: String => Boolean, filter: JsObject => Boolean): Operation =
+    Operation((f: String => Source[JsObject, NotUsed]) => {
+      case table if tableFilter(table) => f(table).filterNot(filter)
+      case other => f(other)
+    })
+
+  def removeEntity(tables: String*)(filter: JsObject => Boolean): Operation = removeEntity(tables.contains, filter)
+
+  def removeEntity(table: String)(filter: JsObject => Boolean): Operation = removeEntity(_ == table, filter)
+
+  // Renames the attribute at `oldNamePath`; audit entries are handled too, where the
+  // attribute appears under the "details" sub-document.
+  def renameAttribute(tableFilter: String => Boolean, newName: String, oldNamePath: Seq[String]): Operation =
+    Operation((f: String => Source[JsObject, NotUsed]) => {
+      // rename attribute in the selected entities
+      case table if tableFilter(table) =>
+        f(table).map { o =>
+          rename(o, newName, oldNamePath)
+        }
+      case "audit" => f("audit").map(o => rename(o, newName, "details" +: oldNamePath))
+      case other => f(other)
+    })
+
+  def renameAttribute(tables: Seq[String], newName: String, oldNamePath: String*): Operation =
+    renameAttribute(a => tables.contains(a), newName, oldNamePath)
+
+  def renameAttribute(table: String, newName: String, oldNamePath: String*): Operation = renameAttribute(_ == table, newName, oldNamePath)
+
+  // Recursive helper: walks `path` into nested objects and renames the leaf attribute.
+  def rename(value: JsObject, newName: String, path: Seq[String]): JsObject =
+    if (path.isEmpty) {
+      value
+    } else {
+      val head = path.head
+      val tail = path.tail
+      value \ head match {
+        case JsDefined(v) if tail.isEmpty => value - head + (newName -> v)
+        case JsDefined(v: JsObject) => value - head + (head -> rename(v, newName, tail))
+        case _ => value
+      }
+    }
+
+  // Transforms the value of `attribute` when present; absent attributes are left untouched.
+  def mapAttribute(tableFilter: String => Boolean, attribute: String, transform: JsValue => JsValue): Operation =
+    mapEntity(
+      tableFilter,
+      x =>
+        x \ attribute match {
+          case _: JsUndefined => x
+          case JsDefined(a) => x + (attribute -> transform(a))
+        }
+    )
+
+  def mapAttribute(tables: Seq[String], attribute: String)(transform: JsValue => JsValue): Operation =
+    mapAttribute(a => tables.contains(a), attribute, transform)
+
+  def mapAttribute(table: String, attribute: String)(transform: JsValue => JsValue): Operation = mapAttribute(_ == table, attribute, transform)
+
+  def removeAttribute(tableFilter: String => Boolean, attributes: String*): Operation =
+    mapEntity(
+      tableFilter,
+      x =>
+        attributes.foldLeft(x) { (y, a) =>
+          y - a
+        }
+    )
+
+  def removeAttribute(tables: Seq[String], attributes: String*): Operation = removeAttribute(a => tables.contains(a), attributes: _*)
+
+  def removeAttribute(table: String, attributes: String*): Operation = removeAttribute(_ == table, attributes: _*)
+
+  def addAttribute(tableFilter: String => Boolean, attributes: (String, JsValue)*): Operation =
+    mapEntity(
+      tableFilter,
+      x =>
+        attributes.foldLeft(x) { (y, a) =>
+          y + a
+        }
+    )
+
+  def addAttribute(tables: Seq[String], attributes: (String, JsValue)*): Operation = addAttribute(t => tables.contains(t), attributes: _*)
+
+  def addAttribute(table: String, attributes: (String, JsValue)*): Operation = addAttribute(_ == table, attributes: _*)
+
+  // Adds each attribute only when the entity does not already define it.
+  // BUG FIX: the fold previously matched and extended `x` (the original entity) instead
+  // of the accumulator `y`, so all but the last added attribute were discarded.
+  def addAttributeIfAbsent(tableFilter: String => Boolean, attributes: (String, JsValue)*): Operation =
+    mapEntity(
+      tableFilter,
+      x =>
+        attributes.foldLeft(x) { (y, a) =>
+          y \ a._1 match {
+            case _: JsUndefined => y + a
+            case _ => y
+          }
+        }
+    )
+
+  // BUG FIX: these two overloads previously delegated to addAttribute, which
+  // unconditionally overwrote existing values instead of preserving them.
+  def addAttributeIfAbsent(tables: Seq[String], attributes: (String, JsValue)*): Operation =
+    addAttributeIfAbsent(t => tables.contains(t), attributes: _*)
+
+  def addAttributeIfAbsent(table: String, attributes: (String, JsValue)*): Operation = addAttributeIfAbsent(_ == table, attributes: _*)
+}
diff --git a/elastic4play/app/org/elastic4play/services/ModelSrv.scala b/elastic4play/app/org/elastic4play/services/ModelSrv.scala
new file mode 100644
index 000000000..b6ce3794e
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/ModelSrv.scala
@@ -0,0 +1,14 @@
+package org.elastic4play.services
+
+import javax.inject.{Inject, Provider, Singleton}
+
+import scala.collection.immutable
+
+import org.elastic4play.models.BaseModelDef
+
+@Singleton
+// Registry of all declared models, resolvable by model name.
+class ModelSrv @Inject() (models: Provider[immutable.Set[BaseModelDef]]) {
+  // Lazily built name -> model index; the Provider defers resolution (avoids eager DI cycles).
+  private[ModelSrv] lazy val modelMap = models.get.map(m => m.modelName -> m).toMap
+  def apply(modelName: String): Option[BaseModelDef] = modelMap.get(modelName)
+  lazy val list: Seq[BaseModelDef] = models.get.toSeq
+}
diff --git a/elastic4play/app/org/elastic4play/services/QueryDSL.scala b/elastic4play/app/org/elastic4play/services/QueryDSL.scala
new file mode 100644
index 000000000..06c9bedd3
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/QueryDSL.scala
@@ -0,0 +1,109 @@
+package org.elastic4play.services
+
+import com.sksamuel.elastic4s.ElasticDsl.{
+ boolQuery,
+ existsQuery,
+ hasChildQuery,
+ hasParentQuery,
+ idsQuery,
+ matchAllQuery,
+ matchQuery,
+ nestedQuery,
+ query,
+ rangeQuery,
+ termQuery,
+ termsQuery,
+ wildcardQuery
+}
+import com.sksamuel.elastic4s.requests.searches.ScoreMode
+import com.sksamuel.elastic4s.requests.searches.queries.Query
+import org.elastic4play.models.BaseEntity
+
+object QueryDSL {
+
+  // --- metric aggregations; the default aggregation name is derived from the field ---
+
+  def selectAvg(aggregationName: Option[String], field: String, query: Option[QueryDef]): SelectAvg =
+    new SelectAvg(aggregationName.getOrElse(s"avg_$field"), field, query)
+  def selectAvg(field: String): SelectAvg = selectAvg(None, field, None)
+
+  def selectMin(aggregationName: Option[String], field: String, query: Option[QueryDef]): SelectMin =
+    new SelectMin(aggregationName.getOrElse(s"min_$field"), field, query)
+  def selectMin(field: String): SelectMin = selectMin(None, field, None)
+
+  def selectMax(aggregationName: Option[String], field: String, query: Option[QueryDef]): SelectMax =
+    new SelectMax(aggregationName.getOrElse(s"max_$field"), field, query)
+  def selectMax(field: String): SelectMax = selectMax(None, field, None)
+
+  def selectSum(aggregationName: Option[String], field: String, query: Option[QueryDef]): SelectSum =
+    new SelectSum(aggregationName.getOrElse(s"sum_$field"), field, query)
+  def selectSum(field: String): SelectSum = selectSum(None, field, None)
+  def selectCount(aggregationName: Option[String], query: Option[QueryDef]): SelectCount = new SelectCount(aggregationName.getOrElse("count"), query)
+  val selectCount: SelectCount = selectCount(None, None)
+
+  def selectTop(aggregationName: Option[String], size: Int, sortBy: Seq[String]): SelectTop =
+    new SelectTop(aggregationName.getOrElse("top"), size, sortBy)
+  def selectTop(size: Int, sortBy: Seq[String]): SelectTop = selectTop(None, size, sortBy)
+
+  // --- bucket aggregations ---
+
+  def groupByTime(aggregationName: Option[String], fields: Seq[String], interval: String, selectables: Agg*): GroupByTime =
+    new GroupByTime(aggregationName.getOrElse("datehistogram"), fields, interval, selectables)
+  def groupByTime(fields: Seq[String], interval: String, selectables: Agg*): GroupByTime = groupByTime(None, fields, interval, selectables: _*)
+
+  def groupByField(aggregationName: Option[String], field: String, size: Int, sortBy: Seq[String], selectables: Agg*): GroupByField =
+    new GroupByField(aggregationName.getOrElse("term"), field, Some(size), sortBy, selectables)
+
+  def groupByField(aggregationName: Option[String], field: String, selectables: Agg*): GroupByField =
+    new GroupByField(aggregationName.getOrElse("term"), field, None, Nil, selectables)
+  def groupByField(field: String, selectables: Agg*): GroupByField = groupByField(None, field, selectables: _*)
+
+  // NOTE(review): "Caterogy" is a typo, but it is part of the public API (referenced by
+  // the aggregation JSON reader); renaming would break callers, so it is kept as-is.
+  def groupByCaterogy(aggregationName: Option[String], categories: Map[String, QueryDef], selectables: Agg*) =
+    new GroupByCategory(aggregationName.getOrElse("categories"), categories, selectables)
+
+  // Wraps `q` in one nestedQuery per parent segment of a dotted field name; the special
+  // field "_type" is translated to the "relations" join field.
+  private def nestedField(field: String, q: String => Query) = field match {
+    case "_type" => q("relations")
+    case _ =>
+      field
+        .split("\\.")
+        .init
+        .inits
+        .toSeq
+        .init
+        .foldLeft(q(field)) {
+          case (queryDef, subName) => nestedQuery(subName.mkString("."), queryDef).scoreMode(ScoreMode.None)
+        }
+  }
+
+  // Operator syntax for building field predicates: "field" ~= v, "field" ~< v, etc.
+  implicit class SearchField(field: String) {
+    // Normalizes enum values and BigDecimal to primitives the query builder accepts.
+    private def convertValue(value: Any): Any = value match {
+      case _: Enumeration#Value => value.toString
+      case bd: BigDecimal => bd.toDouble
+      case _ => value
+    }
+    def ~=(value: Any): QueryDef = QueryDef(nestedField(field, termQuery(_, convertValue(value))))
+    def ~=~(value: Any): QueryDef = QueryDef(nestedField(field, wildcardQuery(_, convertValue(value))))
+    def like(value: Any): QueryDef = QueryDef(nestedField(field, matchQuery(_, convertValue(value))))
+    def ~!=(value: Any): QueryDef = not(QueryDef(nestedField(field, termQuery(_, convertValue(value)))))
+    def ~<(value: Any): QueryDef = QueryDef(nestedField(field, rangeQuery(_).lt(value.toString)))
+    def ~>(value: Any): QueryDef = QueryDef(nestedField(field, rangeQuery(_).gt(value.toString)))
+    def ~<=(value: Any): QueryDef = QueryDef(nestedField(field, rangeQuery(_).lte(value.toString)))
+    def ~>=(value: Any): QueryDef = QueryDef(nestedField(field, rangeQuery(_).gte(value.toString)))
+    def ~<>(value: (Any, Any)): QueryDef = QueryDef(nestedField(field, rangeQuery(_).gt(value._1.toString).lt(value._2.toString)))
+    def ~=<>=(value: (Any, Any)): QueryDef = QueryDef(nestedField(field, rangeQuery(_).gte(value._1.toString).lte(value._2.toString)))
+    def in(values: String*): QueryDef = QueryDef(nestedField(field, termsQuery(_, values)))
+  }
+
+  // --- boolean / structural combinators ---
+
+  def ofType(value: String): QueryDef = QueryDef(termQuery("relations", value))
+  def withId(entityIds: String*): QueryDef = QueryDef(idsQuery(entityIds))
+  def any: QueryDef = QueryDef(matchAllQuery())
+  def contains(field: String): QueryDef = QueryDef(nestedField(field, existsQuery))
+  def or(queries: QueryDef*): QueryDef = or(queries)
+  def or(queries: Iterable[QueryDef]): QueryDef = QueryDef(boolQuery().should(queries.map(_.query)))
+  def and(queries: QueryDef*): QueryDef = QueryDef(boolQuery().must(queries.map(_.query)))
+  def and(queries: Iterable[QueryDef]): QueryDef = QueryDef(boolQuery().must(queries.map(_.query)))
+  def not(query: QueryDef): QueryDef = QueryDef(boolQuery().not(query.query))
+  def child(childType: String, query: QueryDef): QueryDef = QueryDef(hasChildQuery(childType, query.query, ScoreMode.None))
+  def parent(parentType: String, query: QueryDef): QueryDef = QueryDef(hasParentQuery(parentType, query.query, score = false))
+  def withParent(parent: BaseEntity): QueryDef = withParent(parent.model.modelName, parent.id)
+
+  def withParent(parentType: String, parentId: String): QueryDef =
+    QueryDef(hasParentQuery(parentType, idsQuery(parentId), score = false))
+  // Builds a query from a raw query string.
+  def string(queryString: String): QueryDef = QueryDef(query(queryString))
+}
diff --git a/elastic4play/app/org/elastic4play/services/SequenceSrv.scala b/elastic4play/app/org/elastic4play/services/SequenceSrv.scala
new file mode 100644
index 000000000..5cf20cdb4
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/SequenceSrv.scala
@@ -0,0 +1,8 @@
+package org.elastic4play.services
+
+import javax.inject.{Inject, Singleton}
+
+import org.elastic4play.database.{DBConfiguration, DBSequence}
+
+@Singleton
+// Thin DI wrapper exposing DBSequence (entity id sequence generation) as an injectable service.
+class SequenceSrv @Inject() (db: DBConfiguration) extends DBSequence(db)
diff --git a/elastic4play/app/org/elastic4play/services/TempSrv.scala b/elastic4play/app/org/elastic4play/services/TempSrv.scala
new file mode 100644
index 000000000..d2694d720
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/TempSrv.scala
@@ -0,0 +1,76 @@
+package org.elastic4play.services
+
+import java.io.IOException
+import java.nio.file._
+import java.nio.file.attribute.BasicFileAttributes
+import javax.inject.{Inject, Singleton}
+
+import scala.concurrent.{ExecutionContext, Future}
+
+import play.api.Logger
+import play.api.inject.ApplicationLifecycle
+import play.api.mvc.{Filter, RequestHeader, Result}
+
+import akka.stream.Materializer
+
+import org.elastic4play.utils.Instance
+
+@Singleton
+// Manages request-scoped temporary files; everything is removed on application stop.
+class TempSrv @Inject() (lifecycle: ApplicationLifecycle) {
+
+  private[TempSrv] lazy val logger = Logger(getClass)
+
+  // Per-process root directory for request-scoped temporary files.
+  private[TempSrv] val tempDir = Files.createTempDirectory(Paths.get(System.getProperty("java.io.tmpdir")), "").resolve("play-request")
+  lifecycle.addStopHook { () =>
+    Future.successful(delete(tempDir))
+  }
+
+  // Visitor that deletes files first, then their (now empty) parent directories.
+  private[TempSrv] object deleteVisitor extends SimpleFileVisitor[Path] {
+    override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
+      Files.delete(file)
+      FileVisitResult.CONTINUE
+    }
+
+    override def postVisitDirectory(dir: Path, e: IOException): FileVisitResult = {
+      Files.delete(dir)
+      FileVisitResult.CONTINUE
+    }
+  }
+  // Best-effort recursive delete. NOTE(review): catches Throwable so cleanup never
+  // breaks request handling — confirm this should not be narrowed to NonFatal.
+  private[TempSrv] def delete(directory: Path): Unit =
+    try {
+      if (Files.exists(directory))
+        Files.walkFileTree(directory, deleteVisitor)
+      ()
+    } catch {
+      case t: Throwable => logger.warn(s"Fail to remove temporary files ($directory) : $t")
+    }
+
+  // Directory dedicated to one request; ":" is replaced as it is invalid on some filesystems.
+  private def requestTempDir(requestId: String): Path =
+    tempDir.resolve(requestId.replace(":", "_"))
+
+  // Creates a temporary file tied to the current request; removed when the request completes.
+  def newTemporaryFile(prefix: String, suffix: String)(implicit authContext: AuthContext): Path = {
+    val td = requestTempDir(authContext.requestId)
+    if (!Files.exists(td))
+      Files.createDirectories(td)
+    Files.createTempFile(td, prefix, suffix)
+  }
+
+  def releaseTemporaryFiles()(implicit authContext: AuthContext): Unit =
+    releaseTemporaryFiles(authContext.requestId)
+
+  def releaseTemporaryFiles(request: RequestHeader): Unit =
+    releaseTemporaryFiles(Instance.getRequestId(request))
+
+  // Deletes the request's temporary directory if it exists.
+  def releaseTemporaryFiles(requestId: String): Unit = {
+    val td = requestTempDir(requestId)
+    if (Files.exists(td))
+      delete(td)
+  }
+}
+
+// Play filter that removes the request's temporary files once the action completes,
+// whether it succeeded or failed (andThen runs on both outcomes, preserving the result).
+class TempFilter @Inject() (tempSrv: TempSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) extends Filter {
+
+  override def apply(nextFilter: RequestHeader => Future[Result])(requestHeader: RequestHeader): Future[Result] =
+    nextFilter(requestHeader)
+      .andThen { case _ => tempSrv.releaseTemporaryFiles(requestHeader) }
+}
diff --git a/elastic4play/app/org/elastic4play/services/UpdateSrv.scala b/elastic4play/app/org/elastic4play/services/UpdateSrv.scala
new file mode 100644
index 000000000..ac35fb21f
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/UpdateSrv.scala
@@ -0,0 +1,103 @@
+package org.elastic4play.services
+
+import java.util.Date
+import javax.inject.{Inject, Singleton}
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.Try
+
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json.{JsObject, Json}
+
+import org.scalactic.Accumulation.convertGenTraversableOnceToValidatable
+import org.scalactic.Every.everyToGenTraversableOnce
+import org.scalactic.{Bad, One}
+
+import org.elastic4play.JsonFormat.dateFormat
+import org.elastic4play.controllers.Fields
+import org.elastic4play.database.{DBModify, ModifyConfig}
+import org.elastic4play.models.{AbstractModelDef, BaseEntity, BaseModelDef, EntityDef}
+import org.elastic4play.utils.{RichFuture, RichOr}
+import org.elastic4play.{AttributeCheckingError, UnknownAttributeError}
+
+@Singleton
+// Validates, persists and audits updates of existing entities.
+class UpdateSrv @Inject() (
+    fieldsSrv: FieldsSrv,
+    dbModify: DBModify,
+    getSrv: GetSrv,
+    attachmentSrv: AttachmentSrv,
+    eventSrv: EventSrv
+) {
+
+  /**
+    * Check if entity attributes are valid. Format is not checked as it has been already checked.
+    * All attribute errors are accumulated before failing, rather than stopping at the first.
+    */
+  private[services] def checkAttributes(attrs: JsObject, model: BaseModelDef): Future[JsObject] =
+    attrs
+      .fields
+      .map {
+        case (name, value) =>
+          // "a.b.c" addresses a sub-attribute; only the head segment must exist on the model.
+          val names = name.split("\\.").toSeq
+          (name, names, value, model.modelAttributes.get(names.head))
+      }
+      .validatedBy {
+        case (name, _, value, None) => Bad(One(UnknownAttributeError(name, value)))
+        case (name, names, value, Some(attr)) => attr.validateForUpdate(names.tail, value).map(name -> _)
+      }
+      .fold(attrs => Future.successful(JsObject(attrs)), errors => Future.failed(AttributeCheckingError(model.modelName, errors)))
+
+  // Runs the model's update hook, validates attributes, resolves attachments, then persists.
+  private[services] def doUpdate[E <: BaseEntity](entity: E, attributes: JsObject, modifyConfig: ModifyConfig)(
+      implicit authContext: AuthContext,
+      ec: ExecutionContext
+  ): Future[E] =
+    for {
+      attributesAfterHook <- entity.model.updateHook(entity, addMetaFields(attributes))
+      checkedAttributes <- checkAttributes(attributesAfterHook, entity.model)
+      attributesWithAttachment <- attachmentSrv(entity.model)(checkedAttributes)
+      newEntity <- dbModify(entity, attributesWithAttachment, modifyConfig)
+    } yield newEntity.asInstanceOf[E]
+
+  // Stamps the update with its author and time.
+  private[services] def addMetaFields(attrs: JsObject)(implicit authContext: AuthContext): JsObject =
+    attrs ++
+      Json.obj("updatedBy" -> authContext.userId, "updatedAt" -> Json.toJson(new Date))
+
+  // Meta fields are stripped from the audited diff before publishing.
+  private[services] def removeMetaFields(attrs: JsObject): JsObject = attrs - "updatedBy" - "updatedAt"
+
+  // Update by id: fetch the entity then delegate to the entity-based overload.
+  def apply[M <: AbstractModelDef[M, E], E <: EntityDef[M, E]](model: M, id: String, fields: Fields, modifyConfig: ModifyConfig)(
+      implicit authContext: AuthContext,
+      ec: ExecutionContext
+  ): Future[E] =
+    for {
+      entity <- getSrv[M, E](model, id)
+      newEntity <- apply[E](entity, fields, modifyConfig)
+    } yield newEntity
+
+  // Bulk update by ids; each element succeeds or fails independently (Try per id).
+  def apply[M <: AbstractModelDef[M, E], E <: EntityDef[M, E]](model: M, ids: Seq[String], fields: Fields, modifyConfig: ModifyConfig)(
+      implicit authContext: AuthContext,
+      ec: ExecutionContext
+  ): Future[Seq[Try[E]]] =
+    Future.sequence {
+      ids.map { id =>
+        getSrv[M, E](model, id)
+          .flatMap(entity => apply[E](entity, fields, modifyConfig).toTry)
+      }
+    }
+
+  // Parses the raw fields, applies the update and publishes an audit event.
+  def apply[E <: BaseEntity](entity: E, fields: Fields, modifyConfig: ModifyConfig)(
+      implicit authContext: AuthContext,
+      ec: ExecutionContext
+  ): Future[E] =
+    for {
+      attributes <- fieldsSrv.parse(fields, entity.model).toFuture
+      newEntity <- doUpdate(entity, attributes, modifyConfig)
+      _ = eventSrv.publish(AuditOperation(newEntity, AuditableAction.Update, removeMetaFields(attributes), authContext))
+    } yield newEntity
+
+  // Bulk update of already-fetched entities; failures are captured per element.
+  def apply[E <: BaseEntity](entitiesAttributes: Seq[(E, Fields)], modifyConfig: ModifyConfig)(
+      implicit authContext: AuthContext,
+      ec: ExecutionContext
+  ): Future[Seq[Try[E]]] =
+    Future.sequence(entitiesAttributes.map {
+      case (entity, fields) => apply(entity, fields, modifyConfig).toTry
+    })
+}
diff --git a/elastic4play/app/org/elastic4play/services/UserSrv.scala b/elastic4play/app/org/elastic4play/services/UserSrv.scala
new file mode 100644
index 000000000..42d4c714b
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/UserSrv.scala
@@ -0,0 +1,65 @@
+package org.elastic4play.services
+
+import scala.concurrent.Future
+import play.api.libs.json.JsObject
+import play.api.mvc.{RequestHeader, Result}
+import org.elastic4play.{AuthenticationError, AuthorizationError}
+
+// Base class for authorization roles; `name` is the role identifier used in logs/JSON.
+abstract class Role(val name: String) {
+  override def toString: String = name
+}
+
+// Identity and authorization data attached to an authenticated request.
+trait AuthContext {
+  def userId: String
+  def userName: String
+  // Correlation id of the originating HTTP request.
+  def requestId: String
+  def roles: Seq[Role]
+  // Name of the authentication provider that produced this context.
+  def authMethod: String
+}
+
+// Retrieves users and builds AuthContexts for them.
+trait UserSrv {
+  def getFromId(request: RequestHeader, userId: String, authMethod: String): Future[AuthContext]
+  def getFromUser(request: RequestHeader, user: User, authMethod: String): Future[AuthContext]
+  // Context used before any user exists — presumably for initial setup; confirm with implementations.
+  def getInitialUser(request: RequestHeader): Future[AuthContext]
+  // Runs `block` with an internal/initial authentication context.
+  def inInitAuthContext[A](block: AuthContext => Future[A]): Future[A]
+  def get(userId: String): Future[User]
+}
+
+// Minimal user abstraction exposed to the authentication layer.
+trait User {
+  val attributes: JsObject
+  val id: String
+  def getUserName: String
+  def getRoles: Seq[Role]
+}
+
+// Optional capabilities an AuthSrv implementation may advertise.
+object AuthCapability extends Enumeration {
+  type Type = Value
+  val changePassword, setPassword, authByKey = Value
+}
+
+// Pluggable authentication provider. Every operation fails by default; implementations
+// override only what they support and advertise it through `capabilities`.
+trait AuthSrv {
+  val name: String
+  val capabilities = Set.empty[AuthCapability.Type]
+
+  def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] =
+    Future.failed(AuthenticationError("Authentication using login/password is not supported"))
+
+  def authenticate(key: String)(implicit request: RequestHeader): Future[AuthContext] =
+    Future.failed(AuthenticationError("Authentication using API key is not supported"))
+
+  // SSO entry point; presumably Left(result) is an intermediate HTTP response and
+  // Right(ctx) a completed login — confirm with concrete implementations.
+  def authenticate()(implicit request: RequestHeader): Future[Either[Result, AuthContext]] =
+    Future.failed(AuthenticationError("SSO authentication is not supported"))
+
+  def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+    Future.failed(AuthorizationError("Change password is not supported"))
+
+  def setPassword(username: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+    Future.failed(AuthorizationError("Set password is not supported"))
+
+  def renewKey(username: String)(implicit authContext: AuthContext): Future[String] =
+    Future.failed(AuthorizationError("Renew API key is not supported"))
+  def getKey(username: String)(implicit authContext: AuthContext): Future[String] = Future.failed(AuthorizationError("Get API key is not supported"))
+
+  def removeKey(username: String)(implicit authContext: AuthContext): Future[Unit] =
+    Future.failed(AuthorizationError("Remove API key is not supported"))
+}
diff --git a/elastic4play/app/org/elastic4play/services/auth/ADAuthSrv.scala b/elastic4play/app/org/elastic4play/services/auth/ADAuthSrv.scala
new file mode 100644
index 000000000..83fd97bd0
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/auth/ADAuthSrv.scala
@@ -0,0 +1,129 @@
+package org.elastic4play.services.auth
+
+import java.net.ConnectException
+import java.util
+import javax.inject.{Inject, Singleton}
+import javax.naming.Context
+import javax.naming.directory._
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success, Try}
+
+import play.api.mvc.RequestHeader
+import play.api.{Configuration, Logger}
+
+import org.elastic4play.services._
+import org.elastic4play.{AuthenticationError, AuthorizationError}
+
+// Low-level Active Directory (LDAP) connection with fail-over across `serverNames`.
+case class ADConnection(domainFQDN: String, domainName: String, serverNames: Seq[String], useSSL: Boolean) {
+
+  // NOTE(review): deliberately logs under ADAuthSrv so all AD messages share one logger — confirm intended.
+  private[ADConnection] lazy val logger = Logger(classOf[ADAuthSrv])
+
+  private val noADServerAvailableException = AuthenticationError("No LDAP server found")
+
+  // A fatal error stops the fail-over loop; connection-level errors move on to the next server.
+  private def isFatal(t: Throwable): Boolean = t match {
+    case null => true
+    case `noADServerAvailableException` => false
+    case _: ConnectException => false
+    case _ => isFatal(t.getCause)
+  }
+
+  // Binds to the first reachable server with the given credentials and runs `f` on the
+  // directory context. Empty passwords are rejected up front (would be an anonymous bind).
+  private def connect[A](username: String, password: String)(f: InitialDirContext => Try[A]): Try[A] =
+    if (password.isEmpty) Failure(AuthenticationError("Authentication failure"))
+    else
+      serverNames.foldLeft[Try[A]](Failure(noADServerAvailableException)) {
+        case (Failure(e), serverName) if !isFatal(e) =>
+          val protocol = if (useSSL) "ldaps://" else "ldap://"
+          val env = new util.Hashtable[Any, Any]
+          env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory")
+          env.put(Context.PROVIDER_URL, protocol + serverName)
+          env.put(Context.SECURITY_AUTHENTICATION, "simple")
+          env.put(Context.SECURITY_PRINCIPAL, username)
+          env.put(Context.SECURITY_CREDENTIALS, password)
+          Try {
+            val ctx = new InitialDirContext(env)
+            try f(ctx)
+            finally ctx.close()
+          }.flatten
+            .recoverWith {
+              case ldapError =>
+                logger.debug("LDAP connect error", ldapError)
+                Failure(ldapError)
+            }
+        case (r, _) => r
+      }
+
+  // Resolves the user's distinguished name via a sAMAccountName search under the domain DN.
+  private def getUserDN(ctx: InitialDirContext, username: String): Try[String] =
+    Try {
+      val controls = new SearchControls()
+      controls.setSearchScope(SearchControls.SUBTREE_SCOPE)
+      controls.setCountLimit(1)
+      val domainDN = domainFQDN.split("\\.").mkString("dc=", ",dc=", "")
+      val searchResult = ctx.search(domainDN, "(sAMAccountName={0})", Array[Object](username), controls)
+      if (searchResult.hasMore) searchResult.next().getNameInNamespace
+      else throw AuthenticationError("User not found in Active Directory")
+    }
+
+  // Authentication is just a successful bind as DOMAIN\username.
+  def authenticate(username: String, password: String): Try[Unit] =
+    connect(domainName + "\\" + username, password)(_ => Success(()))
+
+  // Self-service password change: binds as the user with the old password, then
+  // removes the old unicodePwd value and adds the new one.
+  def changePassword(username: String, oldPassword: String, newPassword: String): Try[Unit] =
+    if (oldPassword.isEmpty || newPassword.isEmpty)
+      Failure(AuthorizationError("Change password failure"))
+    else {
+
+      // AD expects the quoted password encoded in UTF-16LE.
+      val unicodeOldPassword = ("\"" + oldPassword + "\"").getBytes("UTF-16LE")
+      val unicodeNewPassword = ("\"" + newPassword + "\"").getBytes("UTF-16LE")
+      connect(domainName + "\\" + username, oldPassword) { ctx =>
+        getUserDN(ctx, username).map { userDN =>
+          val mods = Array(
+            new ModificationItem(DirContext.REMOVE_ATTRIBUTE, new BasicAttribute("unicodePwd", unicodeOldPassword)),
+            new ModificationItem(DirContext.ADD_ATTRIBUTE, new BasicAttribute("unicodePwd", unicodeNewPassword))
+          )
+          ctx.modifyAttributes(userDN, mods)
+        }
+      }
+    }
+}
+
+object ADConnection {
+
+  // Builds connection settings from configuration; falls back to an inert, empty
+  // connection when auth.ad.domainFQDN / auth.ad.domainName are not configured.
+  def apply(configuration: Configuration): ADConnection =
+    (for {
+      domainFQDN <- configuration.getOptional[String]("auth.ad.domainFQDN")
+      domainName <- configuration.getOptional[String]("auth.ad.domainName")
+      serverNames = configuration.getOptional[Seq[String]]("auth.ad.serverNames").getOrElse(Seq(domainFQDN))
+      useSSL = configuration.getOptional[Boolean]("auth.ad.useSSL").getOrElse(false)
+    } yield ADConnection(domainFQDN, domainName, serverNames, useSSL))
+      .getOrElse(ADConnection("", "", Nil, useSSL = false))
+}
+
+@Singleton
+// AuthSrv backed by Active Directory; supports login/password and password change.
+class ADAuthSrv(adConnection: ADConnection, userSrv: UserSrv, implicit val ec: ExecutionContext) extends AuthSrv {
+
+  // Injection entry point: derives the AD connection settings from configuration.
+  @Inject() def this(configuration: Configuration, userSrv: UserSrv, ec: ExecutionContext) = this(ADConnection(configuration), userSrv, ec)
+
+  private[ADAuthSrv] lazy val logger = Logger(getClass)
+  val name: String = "ad"
+  override val capabilities: Set[AuthCapability.Value] = Set(AuthCapability.changePassword)
+
+  // Authenticates against AD then resolves the local user. The AD failure detail is
+  // logged but only a generic error is returned to the caller.
+  override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] =
+    (for {
+      _ <- Future.fromTry(adConnection.authenticate(username, password))
+      authContext <- userSrv.getFromId(request, username, name)
+    } yield authContext)
+      .recoverWith {
+        case t =>
+          logger.error("AD authentication failure", t)
+          Future.failed(AuthenticationError("Authentication failure"))
+      }
+
+  // Delegates to the AD connection; failures are logged and mapped to a generic error.
+  override def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+    Future
+      .fromTry(adConnection.changePassword(username, oldPassword, newPassword))
+      .recoverWith {
+        case t =>
+          logger.error("AD change password failure", t)
+          Future.failed(AuthorizationError("Change password failure"))
+      }
+}
diff --git a/elastic4play/app/org/elastic4play/services/auth/LdapAuthSrv.scala b/elastic4play/app/org/elastic4play/services/auth/LdapAuthSrv.scala
new file mode 100644
index 000000000..11ed58bd1
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/auth/LdapAuthSrv.scala
@@ -0,0 +1,132 @@
+package org.elastic4play.services.auth
+
+import java.net.ConnectException
+import java.util
+import javax.inject.{Inject, Singleton}
+import javax.naming.Context
+import javax.naming.directory._
+
+import scala.concurrent.{ExecutionContext, Future}
+import scala.util.{Failure, Success, Try}
+
+import play.api.mvc.RequestHeader
+import play.api.{Configuration, Logger}
+
+import org.elastic4play.services.{AuthCapability, _}
+import org.elastic4play.{AuthenticationError, AuthorizationError}
+
+case class LdapConnection(serverNames: Seq[String], useSSL: Boolean, bindDN: String, bindPW: String, baseDN: String, filter: String) {
+
+ private[LdapConnection] lazy val logger = Logger(classOf[LdapAuthSrv])
+
+ private val noLdapServerAvailableException = AuthenticationError("No LDAP server found")
+
+ private def isFatal(t: Throwable): Boolean = t match {
+ case null => true
+ case `noLdapServerAvailableException` => false
+ case _: ConnectException => false
+ case _ => isFatal(t.getCause)
+ }
+
+ private def connect[A](username: String, password: String)(f: InitialDirContext => Try[A]): Try[A] =
+ if (password.isEmpty) Failure(AuthenticationError("Authentication failure"))
+ else
+ serverNames.foldLeft[Try[A]](Failure(noLdapServerAvailableException)) {
+ case (Failure(e), serverName) if !isFatal(e) =>
+ val protocol = if (useSSL) "ldaps://" else "ldap://"
+ val env = new util.Hashtable[Any, Any]
+ env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory")
+ env.put(Context.PROVIDER_URL, protocol + serverName)
+ env.put(Context.SECURITY_AUTHENTICATION, "simple")
+ env.put(Context.SECURITY_PRINCIPAL, username)
+ env.put(Context.SECURITY_CREDENTIALS, password)
+ Try {
+ val ctx = new InitialDirContext(env)
+ try f(ctx)
+ finally ctx.close()
+ }.flatten
+ .recoverWith {
+ case ldapError =>
+ logger.debug("LDAP connect error", ldapError)
+ Failure(ldapError)
+ }
+ case (r, _) => r
+ }
+
+ private def getUserDN(ctx: InitialDirContext, username: String): Try[String] =
+ Try {
+ val controls = new SearchControls()
+ controls.setSearchScope(SearchControls.SUBTREE_SCOPE)
+ controls.setCountLimit(1)
+ val searchResult = ctx.search(baseDN, filter, Array[Object](username), controls)
+ if (searchResult.hasMore) searchResult.next().getNameInNamespace
+ else throw AuthenticationError("User not found in LDAP server")
+ }
+
+ def authenticate(username: String, password: String): Try[Unit] =
+ connect(bindDN, bindPW) { ctx =>
+ getUserDN(ctx, username)
+ }.flatMap { userDN =>
+ connect(userDN, password)(_ => Success(()))
+ }
+
+ def changePassword(username: String, oldPassword: String, newPassword: String): Try[Unit] =
+ connect(bindDN, bindPW) { ctx =>
+ getUserDN(ctx, username)
+ }.flatMap { userDN =>
+ connect(userDN, oldPassword) { ctx =>
+ val mods = Array(new ModificationItem(DirContext.REPLACE_ATTRIBUTE, new BasicAttribute("userPassword", newPassword)))
+ Try(ctx.modifyAttributes(userDN, mods))
+ }
+ }
+}
+
+object LdapConnection {
+
+ def apply(configuration: Configuration): LdapConnection =
+ (for {
+ bindDN <- configuration.getOptional[String]("auth.ldap.bindDN")
+ bindPW <- configuration.getOptional[String]("auth.ldap.bindPW")
+ baseDN <- configuration.getOptional[String]("auth.ldap.baseDN")
+ filter <- configuration.getOptional[String]("auth.ldap.filter")
+ serverNames = configuration.getOptional[String]("auth.ldap.serverName").fold[Seq[String]](Nil)(s => Seq(s)) ++
+ configuration.getOptional[Seq[String]]("auth.ldap.serverNames").getOrElse(Nil)
+ useSSL = configuration.getOptional[Boolean]("auth.ldap.useSSL").getOrElse(false)
+
+ } yield LdapConnection(serverNames, useSSL, bindDN, bindPW, baseDN, filter))
+ .getOrElse(LdapConnection(Nil, useSSL = false, "", "", "", ""))
+}
+
+@Singleton
+class LdapAuthSrv(ldapConnection: LdapConnection, userSrv: UserSrv, implicit val ec: ExecutionContext) extends AuthSrv {
+
+ @Inject() def this(configuration: Configuration, userSrv: UserSrv, ec: ExecutionContext) = this(LdapConnection(configuration), userSrv, ec)
+
+ private[LdapAuthSrv] lazy val logger = Logger(getClass)
+
+ val name = "ldap"
+ override val capabilities: Set[AuthCapability.Value] = Set(AuthCapability.changePassword)
+
+ override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] =
+ ldapConnection
+ .authenticate(username, password)
+ .map { _ =>
+ userSrv.getFromId(request, username, name)
+ }
+ .fold[Future[AuthContext]](Future.failed, identity)
+ .recoverWith {
+ case t =>
+ logger.error("LDAP authentication failure", t)
+ Future.failed(AuthenticationError("Authentication failure"))
+ }
+
+ override def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+ ldapConnection
+ .changePassword(username, oldPassword, newPassword)
+ .fold(Future.failed, Future.successful)
+ .recoverWith {
+ case t =>
+ logger.error("LDAP change password failure", t)
+ Future.failed(AuthorizationError("Change password failure"))
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/services/auth/MultiAuthSrv.scala b/elastic4play/app/org/elastic4play/services/auth/MultiAuthSrv.scala
new file mode 100644
index 000000000..bd0b9f031
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/services/auth/MultiAuthSrv.scala
@@ -0,0 +1,87 @@
+package org.elastic4play.services.auth
+
+import javax.inject.{Inject, Singleton}
+import org.elastic4play.AuthenticationError
+import org.elastic4play.services.AuthCapability.Type
+import org.elastic4play.services.{AuthContext, AuthSrv}
+import play.api.mvc.{RequestHeader, Result}
+import play.api.{Configuration, Logger}
+
+import scala.collection.immutable
+import scala.concurrent.{ExecutionContext, Future}
+
+object MultiAuthSrv {
+ private[MultiAuthSrv] lazy val logger = Logger(getClass)
+}
+
+@Singleton
+class MultiAuthSrv(val authProviders: Seq[AuthSrv], implicit val ec: ExecutionContext) extends AuthSrv {
+
+ @Inject() def this(configuration: Configuration, authModules: immutable.Set[AuthSrv], ec: ExecutionContext) =
+ this(
+ configuration
+ .getDeprecated[Option[Seq[String]]]("auth.provider", "auth.type")
+ .getOrElse(Nil)
+ .flatMap { authType =>
+ authModules
+ .find(_.name == authType)
+ .orElse {
+ MultiAuthSrv.logger.error(s"Authentication module $authType not found")
+ None
+ }
+ },
+ ec
+ )
+
+ val name = "multi"
+ override val capabilities: Set[Type] = authProviders.flatMap(_.capabilities).toSet
+
+ private[auth] def forAllAuthProvider[A](body: AuthSrv => Future[A]) =
+ authProviders.foldLeft(Future.failed[A](new Exception("no authentication provider found"))) { (f, a) =>
+ f.recoverWith {
+ case _ =>
+ val r = body(a)
+ r.failed.foreach(error => MultiAuthSrv.logger.debug(s"${a.name} ${error.getClass.getSimpleName} ${error.getMessage}"))
+ r
+ }
+ }
+
+ override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] =
+ forAllAuthProvider(_.authenticate(username, password))
+ .recoverWith {
+ case authError =>
+ MultiAuthSrv.logger.error("Authentication failure", authError)
+ Future.failed(AuthenticationError("Authentication failure"))
+ }
+
+ override def authenticate(key: String)(implicit request: RequestHeader): Future[AuthContext] =
+ forAllAuthProvider(_.authenticate(key))
+ .recoverWith {
+ case authError =>
+ MultiAuthSrv.logger.error("Authentication failure", authError)
+ Future.failed(AuthenticationError("Authentication failure"))
+ }
+
+ override def authenticate()(implicit request: RequestHeader): Future[Either[Result, AuthContext]] =
+ forAllAuthProvider(_.authenticate())
+ .recoverWith {
+ case authError =>
+ MultiAuthSrv.logger.error("Authentication failure", authError)
+ Future.failed(AuthenticationError("Authentication failure"))
+ }
+
+ override def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+ forAllAuthProvider(_.changePassword(username, oldPassword, newPassword))
+
+ override def setPassword(username: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] =
+ forAllAuthProvider(_.setPassword(username, newPassword))
+
+ override def renewKey(username: String)(implicit authContext: AuthContext): Future[String] =
+ forAllAuthProvider(_.renewKey(username))
+
+ override def getKey(username: String)(implicit authContext: AuthContext): Future[String] =
+ forAllAuthProvider(_.getKey(username))
+
+ override def removeKey(username: String)(implicit authContext: AuthContext): Future[Unit] =
+ forAllAuthProvider(_.removeKey(username))
+}
diff --git a/elastic4play/app/org/elastic4play/utils/Collection.scala b/elastic4play/app/org/elastic4play/utils/Collection.scala
new file mode 100644
index 000000000..2de7c6be5
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/Collection.scala
@@ -0,0 +1,34 @@
+package org.elastic4play.utils
+
+import scala.util.{Failure, Success, Try}
+
+object Collection {
+
+// def distinctBy[A, B, Repr, That](xs: IterableOnce[A])(f: A => B)(implicit cbf: CanBuildFrom[Repr, A, That]): That = {
+// val builder = cbf(xs.repr)
+// val i = xs.iterator
+// var set = Set[B]()
+// while (i.hasNext) {
+// val o = i.next
+// val b = f(o)
+// if (!set(b)) {
+// set += b
+// builder += o
+// }
+// }
+// builder.result
+// }
+
+// def partitionTry[A, Repr, ThatA, ThatB](
+// xs: TraversableLike[Try[A], Repr]
+// )(implicit cbfa: CanBuildFrom[Repr, A, ThatA], cbfb: CanBuildFrom[Repr, Throwable, ThatB]): (ThatA, ThatB) = {
+// val aBuilder = cbfa()
+// val bBuilder = cbfb()
+// xs.foreach {
+// case Success(a) => aBuilder += a
+// case Failure(b) => bBuilder += b
+// }
+// (aBuilder.result(), bBuilder.result())
+// }
+
+}
diff --git a/elastic4play/app/org/elastic4play/utils/Hash.scala b/elastic4play/app/org/elastic4play/utils/Hash.scala
new file mode 100644
index 000000000..8b290d8a5
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/Hash.scala
@@ -0,0 +1,80 @@
+package org.elastic4play.utils
+
+import java.nio.charset.Charset
+import java.nio.file.{Path, Paths}
+import java.security.MessageDigest
+
+import scala.concurrent.{ExecutionContext, Future}
+
+import play.api.Logger
+import play.api.libs.json.JsValue
+
+import akka.stream.scaladsl.{FileIO, Sink, Source}
+import akka.stream.{IOResult, Materializer}
+import akka.util.ByteString
+
+// TODO use play.api.libs.Codecs
+
+case class Hasher(algorithms: String*) {
+
+ def fromPath(path: Path)(implicit mat: Materializer, ec: ExecutionContext): Future[Seq[Hash]] =
+ fromSource(FileIO.fromPath(path))
+
+ def fromSource(source: Source[ByteString, Any])(implicit mat: Materializer, ec: ExecutionContext): Future[Seq[Hash]] = {
+ val mds = algorithms.map(algo => MessageDigest.getInstance(algo))
+ source
+ .runForeach { bs =>
+ mds.foreach(md => md.update(bs.toByteBuffer))
+ }
+ .map { _ =>
+ mds.map(md => Hash(md.digest()))
+ }
+ }
+
+ def fromString(data: String): Seq[Hash] =
+ fromByteArray(data.getBytes(Charset.forName("UTF8")))
+
+ def fromByteArray(data: Array[Byte]): Seq[Hash] = {
+ val mds = algorithms.map(algo => MessageDigest.getInstance(algo))
+ mds.map(md => Hash(md.digest(data)))
+ }
+
+}
+
+class MultiHash(algorithms: String)(implicit mat: Materializer, ec: ExecutionContext) {
+ private[MultiHash] lazy val logger = Logger(getClass)
+ private val md = MessageDigest.getInstance(algorithms)
+
+ def addValue(value: JsValue): Unit = {
+ md.update(0.asInstanceOf[Byte])
+ md.update(value.toString.getBytes)
+ }
+
+ def addFile(filename: String): Future[IOResult] =
+ addFile(FileIO.fromPath(Paths.get(filename))).flatMap(identity)
+
+ def addFile[A](source: Source[ByteString, A]): Future[A] = {
+ md.update(0.asInstanceOf[Byte])
+ source
+ .toMat(Sink.foreach { bs =>
+ md.update(bs.toByteBuffer)
+ })((a, done) => done.map(_ => a))
+ .run()
+ }
+ def digest: Hash = Hash(md.digest())
+}
+
+case class Hash(data: Array[Byte]) {
+ override def toString: String = data.map(b => "%02x".format(b)).mkString
+}
+
+object Hash {
+
+ def apply(s: String): Hash = Hash {
+ s.grouped(2)
+ .map { cc =>
+ (Character.digit(cc(0), 16) << 4 | Character.digit(cc(1), 16)).toByte
+ }
+ .toArray
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/utils/Instance.scala b/elastic4play/app/org/elastic4play/utils/Instance.scala
new file mode 100644
index 000000000..41399f225
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/Instance.scala
@@ -0,0 +1,13 @@
+package org.elastic4play.utils
+
+import java.rmi.dgc.VMID
+import java.util.concurrent.atomic.AtomicInteger
+
+import play.api.mvc.RequestHeader
+
+object Instance {
+ val id: String = (new VMID).toString
+ private val counter = new AtomicInteger(0)
+ def getRequestId(request: RequestHeader): String = s"$id:${request.id}"
+ def getInternalId: String = s"$id::${counter.incrementAndGet}"
+}
diff --git a/elastic4play/app/org/elastic4play/utils/JsonFormat.scala b/elastic4play/app/org/elastic4play/utils/JsonFormat.scala
new file mode 100644
index 000000000..387332d0d
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/JsonFormat.scala
@@ -0,0 +1,9 @@
+package org.elastic4play.utils
+
+import play.api.libs.json.{Format, JsString, Reads, Writes}
+
+object JsonFormat {
+ private val hashReads: Reads[Hash] = Reads(json => json.validate[String].map(h => Hash(h)))
+ private val hashWrites: Writes[Hash] = Writes[Hash](h => JsString(h.toString))
+ implicit val hashFormat: Format[Hash] = Format(hashReads, hashWrites)
+}
diff --git a/elastic4play/app/org/elastic4play/utils/RetryOnError.scala b/elastic4play/app/org/elastic4play/utils/RetryOnError.scala
new file mode 100644
index 000000000..c27364ce0
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/RetryOnError.scala
@@ -0,0 +1,48 @@
+package org.elastic4play.utils
+
+import scala.concurrent.{ExecutionContext, Future, Promise}
+import scala.concurrent.duration.FiniteDuration
+import scala.concurrent.duration.DurationInt
+
+import play.api.Logger
+
+import akka.actor.ActorSystem
+
+object RetryOnError {
+
+ @deprecated("use Retry(Int, FiniteDuration)(Class[_]*)(⇒ Future[A])", "1.6.2")
+ def apply[A](cond: Throwable => Boolean = _ => true, maxRetry: Int = 5, initialDelay: FiniteDuration = 1.second)(
+ body: => Future[A]
+ )(implicit system: ActorSystem, ec: ExecutionContext): Future[A] =
+ body.recoverWith {
+ case e if maxRetry > 0 && cond(e) =>
+ val resultPromise = Promise[A]
+ system.scheduler.scheduleOnce(initialDelay) {
+ resultPromise.completeWith(apply(cond, maxRetry - 1, initialDelay * 2)(body))
+ }
+ resultPromise.future
+ }
+}
+
+object Retry {
+ val logger: Logger = Logger(getClass)
+
+ def exceptionCheck(exceptions: Seq[Class[_]])(t: Throwable): Boolean =
+ exceptions.exists(_.isAssignableFrom(t.getClass)) || Option(t.getCause).exists(exceptionCheck(exceptions))
+
+ def apply[T](maxRetry: Int = 5, initialDelay: FiniteDuration = 1.second)(
+ exceptions: Class[_]*
+ )(body: => Future[T])(implicit system: ActorSystem, ec: ExecutionContext): Future[T] =
+ body.recoverWith {
+ case e: Throwable if maxRetry > 0 && exceptionCheck(exceptions)(e) =>
+ logger.warn(s"An error occurs (${e.getMessage}), retrying ($maxRetry)")
+ val resultPromise = Promise[T]()
+ system.scheduler.scheduleOnce(initialDelay) {
+ resultPromise.completeWith(apply(maxRetry - 1, initialDelay * 2)(exceptions: _*)(body))
+ }
+ resultPromise.future
+ case e: Throwable if maxRetry > 0 =>
+ logger.error(s"uncatch error, not retrying", e)
+ throw e
+ }
+}
diff --git a/elastic4play/app/org/elastic4play/utils/package.scala b/elastic4play/app/org/elastic4play/utils/package.scala
new file mode 100644
index 000000000..e988cbfbf
--- /dev/null
+++ b/elastic4play/app/org/elastic4play/utils/package.scala
@@ -0,0 +1,100 @@
+package org.elastic4play
+
+import scala.collection.generic.CanBuildFrom
+import scala.concurrent.duration.{span, Duration, DurationInt, FiniteDuration}
+import scala.concurrent.{Await, ExecutionContext, Future, Promise}
+import scala.util.{Failure, Success, Try}
+
+import play.api.libs.json.{JsObject, JsValue, Writes}
+
+import akka.actor.ActorSystem
+import org.scalactic.{Bad, Good, Or}
+
+package object utils {
+ implicit class RichFuture[T](future: Future[T]) {
+
+ def withTimeout(after: FiniteDuration, default: => T)(implicit system: ActorSystem, ec: ExecutionContext): Future[T] = {
+ val prom = Promise[T]()
+ val timeout = system.scheduler.scheduleOnce(after) { prom.success(default); () }
+ future onComplete { _ =>
+ timeout.cancel()
+ }
+ Future.firstCompletedOf(List(future, prom.future))
+ }
+
+ def await(implicit duration: Duration = 10 seconds span): T = Await.result(future, duration)
+
+ def toOr[E <: Throwable](implicit evidence: Manifest[E], ec: ExecutionContext): Future[T Or E] =
+ future
+ .map(g => Good(g))
+ .recoverWith { case evidence(error) => Future.successful(Bad(error)) }
+
+ def toTry(implicit ec: ExecutionContext): Future[Try[T]] =
+ future
+ .map(r => Success(r))
+ .recover { case t => Failure(t) }
+ }
+
+ implicit class RichJson(obj: JsObject) {
+
+ def setIfAbsent[T](name: String, value: T)(implicit writes: Writes[T]): JsObject =
+ if (obj.keys.contains(name))
+ obj
+ else
+ obj + (name -> writes.writes(value))
+
+ def mapValues(f: JsValue => JsValue): JsObject =
+ JsObject(obj.fields.map {
+ case (key, value) => key -> f(value)
+ })
+
+ def map(f: (String, JsValue) => (String, JsValue)): JsObject =
+ obj
+ .fields
+ .map(kv => JsObject(Seq(f.tupled(kv))))
+ .reduceOption(_ deepMerge _)
+ .getOrElse(JsObject.empty)
+
+ def collectValues(pf: PartialFunction[JsValue, JsValue]): JsObject =
+ JsObject(obj.fields.collect {
+ case (key, value) if pf.isDefinedAt(value) => key -> pf(value)
+ })
+
+ def collect(pf: PartialFunction[(String, JsValue), (String, JsValue)]): JsObject = JsObject(obj.fields.collect(pf))
+ }
+
+ implicit class RichOr[G, B](or: Or[G, B]) {
+ def toFuture(implicit evidence: B <:< Throwable): Future[G] = or.fold(g => Future.successful(g), b => Future.failed(b))
+ }
+
+// implicit class RichTryIterable[A, Repr](xs: TraversableLike[Try[A], Repr]) {
+//
+// def partitionTry[ThatA, ThatB](implicit cbfa: CanBuildFrom[Repr, A, ThatA], cbfb: CanBuildFrom[Repr, Throwable, ThatB]): (ThatA, ThatB) = {
+// val aBuilder = cbfa()
+// val bBuilder = cbfb()
+// xs.foreach {
+// case Success(a) => aBuilder += a
+// case Failure(b) => bBuilder += b
+// }
+// (aBuilder.result(), bBuilder.result())
+// }
+//
+// }
+// implicit class RichOrIterable[A, B, Repr](xs: TraversableLike[A Or B, Repr]) {
+//
+// def partitionOr[ThatA, ThatB](implicit cbfa: CanBuildFrom[Repr, A, ThatA], cbfb: CanBuildFrom[Repr, B, ThatB]): (ThatA, ThatB) = {
+// val aBuilder = cbfa()
+// val bBuilder = cbfb()
+// xs.foreach {
+// case Good(a) => aBuilder += a
+// case Bad(b) => bBuilder += b
+// }
+// (aBuilder.result(), bBuilder.result())
+// }
+// }
+//
+// implicit class RichTuble[A, B](t: (A, B)) {
+// def map1[C](f: A => C): (C, B) = (f(t._1), t._2)
+// def map2[C](f: B => C): (A, C) = (t._1, f(t._2))
+// }
+}
diff --git a/elastic4play/conf/reference.conf b/elastic4play/conf/reference.conf
new file mode 100644
index 000000000..c75bb9c4f
--- /dev/null
+++ b/elastic4play/conf/reference.conf
@@ -0,0 +1,3 @@
+# handler for errors (transforms exceptions into the related HTTP status codes)
+play.http.errorHandler = org.elastic4play.ErrorHandler
+
diff --git a/elastic4play/project/build.properties b/elastic4play/project/build.properties
new file mode 100644
index 000000000..10fd9eee0
--- /dev/null
+++ b/elastic4play/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.5.5
diff --git a/elastic4play/project/plugins.sbt b/elastic4play/project/plugins.sbt
new file mode 100644
index 000000000..e1ab2b430
--- /dev/null
+++ b/elastic4play/project/plugins.sbt
@@ -0,0 +1,8 @@
+// Comment to get more information during initialization
+logLevel := Level.Info
+
+// Use the Play sbt plugin for Play projects
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.16")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
+addSbtPlugin("org.thehive-project" % "sbt-github-changelog" % "0.4.0")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10")
diff --git a/elastic4play/test/common/Fabricator.scala b/elastic4play/test/common/Fabricator.scala
new file mode 100644
index 000000000..eddc61652
--- /dev/null
+++ b/elastic4play/test/common/Fabricator.scala
@@ -0,0 +1,22 @@
+package common
+
+import scala.util.Random
+import play.api.libs.json.{JsBoolean, JsNumber, JsObject, JsString, JsValue}
+
+object Fabricator {
+ def string(prefix: String = "", size: Int = 10): String = prefix + Random.alphanumeric.take(size).mkString
+ def int: Int = Random.nextInt()
+ def boolean: Boolean = Random.nextBoolean()
+ def long: Long = Random.nextLong()
+
+ def jsValue: JsValue = int % 4 match {
+ case 0 => JsNumber(long)
+ case 1 => JsBoolean(boolean)
+ case _ => JsString(string())
+ }
+
+ def jsObject(maxSize: Int = 10): JsObject = {
+ val fields = Seq.fill(int % maxSize)(string() -> jsValue)
+ JsObject(fields)
+ }
+}
diff --git a/elastic4play/test/org/elastic4play/database/DBCreateSpec.scala b/elastic4play/test/org/elastic4play/database/DBCreateSpec.scala
new file mode 100644
index 000000000..62e5ee3c6
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/database/DBCreateSpec.scala
@@ -0,0 +1,106 @@
+//package org.elastic4play.database
+//
+//import scala.concurrent.ExecutionContext.Implicits.{global ⇒ ec}
+//import scala.concurrent.Future
+//
+//import play.api.libs.json.{JsObject, JsString, Json}
+//import play.api.test.PlaySpecification
+//
+//import com.sksamuel.elastic4s.http.index.IndexResponse
+//import com.sksamuel.elastic4s.http.ElasticDsl.IndexHandler
+//import com.sksamuel.elastic4s.indexes.IndexRequest
+//import common.{Fabricator ⇒ F}
+//import org.junit.runner.RunWith
+//import org.specs2.mock.Mockito
+//import org.specs2.runner.JUnitRunner
+//
+//import org.elastic4play.models.BaseEntity
+//import org.elastic4play.utils._
+//
+//@RunWith(classOf[JUnitRunner])
+//class DBCreateSpec extends PlaySpecification with Mockito {
+// val modelName: String = F.string("modelName")
+// val defaultEntityId: String = F.string("defaultEntityId")
+// val sampleDoc: JsObject = Json.obj("caseId" → 42, "title" → "Test case", "description" → "Case used for unit test", "tags" → Seq("test", "specs"))
+//
+// class DBCreateWrapper {
+// val db: DBConfiguration = mock[DBConfiguration]
+// val dbcreate = new DBCreate(db, ec)
+//
+// def apply(modelName: String, attributes: JsObject): (JsObject, IndexRequest) = {
+// val indexResponse = mock[IndexResponse]
+// indexResponse.id returns (attributes \ "_id").asOpt[String].getOrElse(defaultEntityId)
+// db.execute(any[IndexRequest]) returns Future.successful(indexResponse)
+// val attrs = dbcreate(modelName, attributes).await
+// val captor = capture[IndexRequest]
+// there was one(db).execute(captor.capture)
+// (attrs, captor.value)
+// }
+//
+// def apply(parent: BaseEntity, attributes: JsObject): (JsObject, IndexRequest) = {
+// val indexResponse = mock[IndexResponse]
+// indexResponse.id returns (attributes \ "_id").asOpt[String].getOrElse(defaultEntityId)
+// db.execute(any[IndexRequest]) returns Future.successful(indexResponse)
+// val attrs = dbcreate(modelName, Some(parent), attributes).await
+// val captor = capture[IndexRequest]
+// there was one(db).execute(captor.capture)
+// (attrs, captor.value)
+// }
+// }
+//
+// "DBCreate" should {
+// "create document without id, parent or routing" in {
+// val dbcreate = new DBCreateWrapper
+// val (returnAttrs, indexDef) = dbcreate(modelName, sampleDoc)
+// (returnAttrs \ "_type").asOpt[String] must beSome(modelName)
+// (returnAttrs \ "_id").asOpt[String] must beSome(defaultEntityId)
+// (returnAttrs \ "_routing").asOpt[String] must beSome(defaultEntityId)
+// (returnAttrs \ "_parent").asOpt[String] must beNone
+// indexDef.id must beNone
+// indexDef.parent must beNone
+// indexDef.routing must beNone
+// }
+//
+// "create document with id, parent and routing" in {
+// val entityId = F.string("entityId")
+// val routing = F.string("routing")
+// val parentId = F.string("parentId")
+// val dbcreate = new DBCreateWrapper()
+// val (returnAttrs, indexDef) = dbcreate(
+// modelName,
+// sampleDoc +
+// ("_id" → JsString(entityId)) +
+// ("_routing" → JsString(routing)) +
+// ("_parent" → JsString(parentId))
+// )
+//
+// (returnAttrs \ "_type").asOpt[String] must beSome(modelName)
+// (returnAttrs \ "_id").asOpt[String] must beSome(entityId)
+// (returnAttrs \ "_routing").asOpt[String] must beSome(routing)
+// (returnAttrs \ "_parent").asOpt[String] must beSome(parentId)
+// indexDef.id must beSome(entityId)
+// indexDef.parent must beSome(parentId)
+// indexDef.routing must beSome(routing)
+// }
+//
+// "create document with id and parent entity" in {
+// val entityId = F.string("entityId")
+// val routing = F.string("routing")
+// val parentId = F.string("parentId")
+//
+// val dbcreate = new DBCreateWrapper()
+// val parent = mock[BaseEntity]
+// parent.id returns parentId
+// parent.routing returns routing
+// val (returnAttrs, indexDef) = dbcreate(parent, sampleDoc + ("_id" → JsString(entityId)))
+//
+// (returnAttrs \ "_type").asOpt[String] must beSome(modelName)
+// (returnAttrs \ "_id").asOpt[String] must beSome(entityId)
+// (returnAttrs \ "_routing").asOpt[String] must beSome(routing)
+// (returnAttrs \ "_parent").asOpt[String] must beSome(parentId)
+// indexDef.id must beSome(entityId)
+// indexDef.parent must beSome(parentId)
+// indexDef.routing must beSome(routing)
+// }
+// }
+//}
diff --git a/elastic4play/test/org/elastic4play/database/DBFindSpec.scala b/elastic4play/test/org/elastic4play/database/DBFindSpec.scala
new file mode 100644
index 000000000..19c722c47
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/database/DBFindSpec.scala
@@ -0,0 +1,148 @@
+package org.elastic4play.database
+
+import akka.actor.ActorSystem
+import akka.stream.Materializer
+import akka.stream.testkit.scaladsl.TestSink
+import com.sksamuel.elastic4s.ElasticDsl.SearchHandler
+import com.sksamuel.elastic4s.requests.searches.{SearchHit, SearchHits, SearchRequest, SearchResponse, Total}
+import org.elastic4play.utils._
+import org.junit.runner.RunWith
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+import play.api.Application
+import play.api.inject.guice.GuiceApplicationBuilder
+import play.api.test.PlaySpecification
+
+import scala.concurrent.ExecutionContext.Implicits.{global => ec}
+import scala.concurrent.Future
+import scala.concurrent.duration._
+
+@RunWith(classOf[JUnitRunner])
+class DBFindSpec extends PlaySpecification with Mockito {
+
+ lazy val app: Application = new GuiceApplicationBuilder().build()
+ implicit lazy val mat: Materializer = app.materializer
+ implicit lazy val as: ActorSystem = app.actorSystem
+
+ val pageSize = 5
+ val keepAlive: FiniteDuration = 1.minute
+
+ "DBFind" should {
+ "if range is not provided, use offset:0 and limit:10" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(None) must_== ((0, 10))
+ }
+
+ "if range is 75, use offset:75 and limit:10" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("75")) must_== ((75, 10))
+ }
+
+ "if range is 75-NaN, use it as offset:75 and limit:10" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("75-NaN")) must_== ((75, 10))
+ }
+
+ "if range is NaN, use it as offset:0 and limit:10" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("NaN")) must_== ((0, 10))
+ }
+
+ "if range is 75-32, use it as offset:75 and limit:10" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("75-32")) must_== ((75, 10))
+ }
+
+ "if range is 75-100, use it as offset:75 and limit:25" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("75-100")) must_== ((75, 25))
+ }
+
+ "if range is all, use it as offset:0 and limit:Int.MaxValue" in {
+ val db = mock[DBConfiguration]
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ dbfind.getOffsetAndLimitFromRange(Some("all")) must_== ((0, Int.MaxValue))
+ }
+
+// "execute search using scroll" in {
+// val db = mock[DBConfiguration].verbose
+// val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+// val searchDef = mock[SearchRequest]
+// searchDef.limit(pageSize) returns searchDef
+// searchDef.scroll(dbfind.keepAliveStr) returns searchDef
+// val firstPageResult = mock[SearchResponse]
+// val scrollId = F.string("scrollId")
+// val hits = Range(0, 24).map { i =>
+// val m = mock[SearchHit]
+// m.toString returns s"MockResult-$i"
+// m
+// }.toArray
+// firstPageResult.scrollId returns Some(scrollId)
+// firstPageResult.totalHits returns hits.length.toLong
+// firstPageResult.isTimedOut returns false
+// firstPageResult.isEmpty returns false
+// firstPageResult.hits returns SearchHits(24, 0, hits.take(5))
+// db.execute(searchDef)(implicitly[Handler[SearchRequest, SearchResponse]], implicitly[Manifest[SearchResponse]]) returns Future.successful(firstPageResult)
+//
+// val secondPageResult = mock[SearchResponse]
+// secondPageResult.scrollId returns Some(scrollId)
+// secondPageResult.isTimedOut returns false
+// secondPageResult.isEmpty returns false
+// secondPageResult.hits returns SearchHits(24, 0, hits.drop(5))
+// db.execute(SearchScrollRequest(scrollId, Some(keepAlive.toString)))(implicitly[Handler[SearchScrollRequest, SearchResponse]], implicitly[Manifest[SearchResponse]]) returns Future.successful(secondPageResult)
+//
+// val (src, total) = dbfind.searchWithScroll(searchDef, 8, 10)
+// src
+// .runWith(TestSink.probe[SearchHit])
+// .request(2)
+// .expectNextN(hits.slice(8, 10).toList)
+// .request(5)
+// .expectNextN(hits.slice(10, 13).toList)
+// .request(10)
+// .expectNextN(hits.slice(13, 18).toList)
+// .expectComplete
+//
+// total.await must_== hits.length
+// there was one(db).execute(searchDef)
+// there was one(db).execute(any[SearchScrollRequest])
+// // FIXME there was one(db).execute(any[ClearScrollDefinition])
+// }
+
+ "execute search without scroll" in {
+ val db = mock[DBConfiguration]
+ db.indexName returns "index-test"
+ val dbfind = new DBFind(pageSize, keepAlive, db, mat)
+ val limit = 24
+ val offset = 3
+ val hits = Array.fill(limit)(mock[SearchHit])
+ val searchDef = mock[SearchRequest]
+ searchDef.limit(limit) returns searchDef
+ searchDef.start(offset) returns searchDef
+ val results = mock[SearchResponse]
+ //db.execute(searchDef) returns Future.successful(results)
+ doReturn(Future.successful(results)).when(db).execute(searchDef)
+ results.totalHits returns 42
+ results.hits returns SearchHits(Total(24, "eq"), 0, hits)
+
+ val (src, total) = dbfind.searchWithoutScroll(searchDef, offset, limit)
+ src
+ .runWith(TestSink.probe[SearchHit])
+ .request(2)
+ .expectNextN(hits.take(2).toList)
+ .request(10)
+ .expectNextN(hits.slice(2, 12).toList)
+ .request(15)
+ .expectNextN(hits.drop(12).toList)
+ .expectComplete()
+
+ total.await must_== 42
+ there was one(db).execute(searchDef)
+ }
+ }
+}
diff --git a/elastic4play/test/org/elastic4play/database/DBGetSpec.scala b/elastic4play/test/org/elastic4play/database/DBGetSpec.scala
new file mode 100644
index 000000000..59adb50ed
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/database/DBGetSpec.scala
@@ -0,0 +1,51 @@
+//package org.elastic4play.database
+//
+//import scala.concurrent.ExecutionContext.Implicits.{ global ⇒ ec }
+//import scala.concurrent.Future
+//
+//import play.api.libs.json.Json.toJsFieldJsValueWrapper
+//import play.api.libs.json.{ JsNull, Json }
+//import play.api.test.PlaySpecification
+//
+//import com.sksamuel.elastic4s.{ RichSearchHit, RichSearchResponse, SearchDefinition }
+//import org.junit.runner.RunWith
+//import org.specs2.mock.Mockito
+//import org.specs2.runner.JUnitRunner
+//
+//import org.elastic4play.utils.RichFuture
+//
+//@RunWith(classOf[JUnitRunner])
+//class DBGetSpec extends PlaySpecification with Mockito {
+//
+// "DBGet" should {
+// "retrieve document" in {
+// val db = mock[DBConfiguration]
+// val dbget = new DBGet(db, ec)
+// db.indexName returns "testIndex"
+// val modelName = "user"
+// val entityId = "me"
+//
+// val searchDefinition = capture[SearchDefinition]
+// val response = mock[RichSearchResponse]
+// val searchHit = mock[RichSearchHit]
+// response.hits returns Array(searchHit)
+// searchHit.id returns entityId
+// searchHit.`type` returns modelName
+// searchHit.fields returns Map.empty
+//
+// db.execute(searchDefinition.capture) returns Future.successful(response)
+// dbget(modelName, entityId).await must_== Json.obj(
+// "_type" → modelName,
+// "_routing" → entityId,
+// "_parent" → JsNull,
+// "_id" → entityId)
+//
+// Json.parse(searchDefinition.value._builder.toString) must_== Json.obj(
+// "query" → Json.obj(
+// "ids" → Json.obj(
+// "type" → "user",
+// "values" → Seq("me"))),
+// "fields" → Seq("_source", "_routing", "_parent"))
+// }
+// }
+//}
diff --git a/elastic4play/test/org/elastic4play/database/DBModifySpec.scala b/elastic4play/test/org/elastic4play/database/DBModifySpec.scala
new file mode 100644
index 000000000..a11c33745
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/database/DBModifySpec.scala
@@ -0,0 +1,51 @@
+package org.elastic4play.database
+
+import java.util.{Map => JMap}
+
+import org.elastic4play.models.BaseEntity
+import org.junit.runner.RunWith
+import org.specs2.matcher.ValueCheck.typedValueCheck
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json.{JsArray, JsNull, Json}
+import play.api.test.PlaySpecification
+
+import scala.jdk.CollectionConverters._
+
+@RunWith(classOf[JUnitRunner])
+class DBModifySpec extends PlaySpecification with Mockito {
+
+ "DBModify" should {
+ "build correct update script" in {
+ val db = mock[DBConfiguration]
+ val dbmodify = new DBModify(db)
+ val attributes = Json.obj(
+ "obj" -> Json.obj("subAttr1" -> 1),
+ "arr" -> Seq("a", "b", "c"),
+ "num" -> 42,
+ "str" -> "blah",
+ "bool" -> false,
+ "sub.attr.str" -> "subValue",
+ "n" -> JsNull,
+ "sub.attr.remove" -> JsArray(),
+ "remove" -> JsArray()
+ )
+ val script = dbmodify.buildScript(mock[BaseEntity], attributes)
+
+ script.script must_=== """
+ ctx._source["obj"]=params.param0;
+ ctx._source["arr"]=params.param1;
+ ctx._source["num"]=params.param2;
+ ctx._source["str"]=params.param3;
+ ctx._source["bool"]=params.param4;
+ ctx._source["sub"]["attr"]["str"]=params.param5;
+ ctx._source["n"]=null;
+ ctx._source["sub"]["attr"].remove("remove");
+ ctx._source.remove("remove")""".filterNot(c => "\n ".contains(c))
+ script.params - "param0" - "param1" must_=== Map("param2" -> 42, "param3" -> "blah", "param4" -> false, "param5" -> "subValue")
+ script.params("param0").asInstanceOf[JMap[_, _]].asScala must_== Map("subAttr1" -> 1)
+ script.params("param1").asInstanceOf[Array[Any]].toSeq must contain(exactly[Any]("a", "b", "c"))
+ }
+ }
+}
diff --git a/elastic4play/test/org/elastic4play/models/CustomAttributeSpec.scala b/elastic4play/test/org/elastic4play/models/CustomAttributeSpec.scala
new file mode 100644
index 000000000..c35d54898
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/models/CustomAttributeSpec.scala
@@ -0,0 +1,36 @@
+package org.elastic4play.models
+
+import play.api.libs.json.{JsNumber, Json}
+import play.api.test.PlaySpecification
+
+import org.junit.runner.RunWith
+import org.scalactic.Good
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+
+@RunWith(classOf[JUnitRunner])
+class CustomAttributeSpec extends PlaySpecification with Mockito {
+ "a custom fields attribute" should {
+ "accept valid JSON object" in {
+ val js = Json.obj("field1" -> Json.obj("number" -> 12), "field2" -> Json.obj("string" -> "plop"), "field3" -> Json.obj("boolean" -> true))
+ CustomAttributeFormat.checkJsonForCreation(Nil, js) must_=== Good(js)
+ }
+
+ "refuse invalid JSON object" in {
+ val js = Json.obj("field1" -> Json.obj("number" -> "str"), "field2" -> Json.obj("string" -> 12), "field3" -> Json.obj("boolean" -> 45))
+ val result = CustomAttributeFormat.checkJsonForCreation(Nil, js)
+ result.isBad must_=== true
+ }
+
+    "accept updating a single field" in {
+ val js = Json.obj("number" -> 14)
+ CustomAttributeFormat.checkJsonForUpdate(Seq("field-name"), js) must_=== Good(js)
+ }
+
+    "accept updating a single value" in {
+ val js = JsNumber(15)
+ CustomAttributeFormat.checkJsonForUpdate(Seq("field-name", "number"), js) must_=== Good(js)
+ }
+
+ }
+}
diff --git a/elastic4play/test/org/elastic4play/services/CreateSrvSpec.scala b/elastic4play/test/org/elastic4play/services/CreateSrvSpec.scala
new file mode 100644
index 000000000..cb7bfce5a
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/services/CreateSrvSpec.scala
@@ -0,0 +1,98 @@
+package org.elastic4play.services
+
+import java.util.{Date, UUID}
+
+import org.elastic4play.controllers.JsonInputValue
+import org.elastic4play.database.DBCreate
+import org.elastic4play.models.{Attribute, EntityDef, ModelDef, AttributeFormat => F}
+import org.elastic4play.utils.RichFuture
+import org.elastic4play.{AttributeCheckingError, InvalidFormatAttributeError, MissingAttributeError, UnknownAttributeError}
+import org.junit.runner.RunWith
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+import play.api.libs.json._
+import play.api.test.PlaySpecification
+
+@RunWith(classOf[JUnitRunner])
+class CreateSrvSpec extends PlaySpecification with Mockito {
+
+ class TestModel extends ModelDef[TestModel, TestEntity]("testModel", "TestModel", "/test") {
+ val textAttribute: Attribute[String] = attribute("textAttribute", F.textFmt, "textAttribute")
+ val stringAttribute: Attribute[String] = attribute("stringAttribute", F.stringFmt, "stringAttribute")
+ val dateAttribute: Attribute[Date] = attribute("dateAttribute", F.dateFmt, "dateAttribute")
+ val booleanAttribute: Attribute[Boolean] = attribute("booleanAttribute", F.booleanFmt, "booleanAttribute")
+ val uuidAttribute: Attribute[UUID] = attribute("uuidAttribute", F.uuidFmt, "uuidAttribute")
+ val hashAttribute: Attribute[String] = attribute("hashAttribute", F.hashFmt, "hashAttribute")
+ val metricAttribute: Attribute[JsValue] = attribute("metricAttribute", F.metricsFmt, "metricAttribute")
+ }
+ class TestEntity(model: TestModel, attributes: JsObject) extends EntityDef[TestModel, TestEntity](model, attributes)
+ val fieldsSrv: FieldsSrv = mock[FieldsSrv]
+ val dbCreate: DBCreate = mock[DBCreate]
+ val eventSrv: EventSrv = mock[EventSrv]
+ val attachmentSrv: AttachmentSrv = mock[AttachmentSrv]
+ val createSrv = new CreateSrv(fieldsSrv, dbCreate, eventSrv, attachmentSrv)
+ val model = new TestModel
+
+ "CreateSrv.checkAttributes" should {
+    "return attributes if they are correct" in {
+ val attrs = Json.obj(
+ "textAttribute" -> "valid text",
+ "stringAttribute" -> "valid string",
+ "dateAttribute" -> "20160128T175800+0100",
+ "booleanAttribute" -> true,
+ "uuidAttribute" -> "ee0caf69-560b-4453-9bae-72982225e661",
+ "hashAttribute" -> "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
+ "metricAttribute" -> Json.obj("metric1" -> 1, "metric2" -> 2),
+ "createdAt" -> "20160620T162845+0200",
+ "createdBy" -> "testUser"
+ )
+
+ createSrv.checkAttributes(attrs, model).await must_== attrs
+ }
+
+ "returns errors if attribute format is invalid" in {
+ val attrs = Json.obj(
+ "textAttribute" -> true,
+ // "stringAttribute" -> 2134,
+ "dateAttribute" -> "2016-01-28",
+ "booleanAttribute" -> "true",
+ "uuidAttribute" -> "ee0caf69560b44539bae72982225e661",
+ "hashAttribute" -> "01ba471-invalid-9c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
+ "metricAttribute" -> Json.obj("metric1" -> "blah", "metric2" -> 2),
+ "unknownAttribute" -> 1,
+ "metricAttribute.metric3" -> 3,
+ "metricAttribute.metric3" -> "aze"
+ )
+
+ createSrv.checkAttributes(attrs, model).await must throwA[AttributeCheckingError].like {
+ case AttributeCheckingError(_, errors) =>
+ errors must contain( //exactly[Throwable](
+ InvalidFormatAttributeError("textAttribute", model.textAttribute.format.name, JsonInputValue(JsBoolean(true))),
+ InvalidFormatAttributeError("dateAttribute", model.dateAttribute.format.name, JsonInputValue(JsString("2016-01-28"))),
+ InvalidFormatAttributeError("booleanAttribute", model.booleanAttribute.format.name, JsonInputValue(JsString("true"))),
+ InvalidFormatAttributeError(
+ "uuidAttribute",
+ model.uuidAttribute.format.name,
+ JsonInputValue(JsString("ee0caf69560b44539bae72982225e661"))
+ ),
+ InvalidFormatAttributeError(
+ "hashAttribute",
+ model.hashAttribute.format.name,
+ JsonInputValue(JsString("01ba471-invalid-9c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b"))
+ ),
+ InvalidFormatAttributeError(
+ "metricAttribute",
+ model.metricAttribute.format.name,
+ JsonInputValue(Json.obj("metric1" -> "blah", "metric2" -> 2))
+ ),
+ UnknownAttributeError("unknownAttribute", JsNumber(1)),
+ MissingAttributeError("stringAttribute"),
+ MissingAttributeError("createdBy"),
+ UnknownAttributeError("metricAttribute.metric3", JsString("aze"))
+ )
+
+ }
+ }
+ }
+
+}
diff --git a/elastic4play/test/org/elastic4play/services/DeleteSrvSpec.scala b/elastic4play/test/org/elastic4play/services/DeleteSrvSpec.scala
new file mode 100644
index 000000000..07f524852
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/services/DeleteSrvSpec.scala
@@ -0,0 +1,99 @@
+package org.elastic4play.services
+
+import java.util.{Date, UUID}
+
+import scala.concurrent.ExecutionContext.Implicits.{global => ec}
+import scala.concurrent.Future
+import play.api.libs.json.Json.toJsFieldJsValueWrapper
+import play.api.libs.json.{JsObject, JsValue, Json}
+import play.api.test.PlaySpecification
+import org.junit.runner.RunWith
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+import org.elastic4play.NotFoundError
+import org.elastic4play.database.DBRemove
+import org.elastic4play.models.{Attribute, EntityDef, ModelDef, AttributeFormat => F}
+import org.elastic4play.utils.RichFuture
+
+@RunWith(classOf[JUnitRunner])
+class DeleteSrvSpec extends PlaySpecification with Mockito {
+
+ class TestModel extends ModelDef[TestModel, TestEntity]("testModel", "TestModel", "/test") {
+ val textAttribute: Attribute[String] = attribute("textAttribute", F.textFmt, "textAttribute")
+ val stringAttribute: Attribute[String] = attribute("stringAttribute", F.stringFmt, "stringAttribute")
+ val dateAttribute: Attribute[Date] = attribute("dateAttribute", F.dateFmt, "dateAttribute")
+ val booleanAttribute: Attribute[Boolean] = attribute("booleanAttribute", F.booleanFmt, "booleanAttribute")
+ val uuidAttribute: Attribute[UUID] = attribute("uuidAttribute", F.uuidFmt, "uuidAttribute")
+ val hashAttribute: Attribute[String] = attribute("hashAttribute", F.hashFmt, "hashAttribute")
+ val metricAttribute: Attribute[JsValue] = attribute("metricAttribute", F.metricsFmt, "metricAttribute")
+ }
+ class TestEntity(model: TestModel, attributes: JsObject) extends EntityDef[TestModel, TestEntity](model, attributes)
+
+ implicit val authContext: AuthContext = mock[AuthContext]
+
+ val model = new TestModel
+
+ val entity = new TestEntity(
+ model,
+ Json.obj(
+ "_id" -> "42",
+ "_routing" -> "42",
+ "_type" -> "testModel",
+ "_seqNo" -> 1,
+ "_primaryTerm" -> 1,
+ "textAttribute" -> "valid text",
+ "stringAttribute" -> "valid string",
+ "dateAttribute" -> "20160128T175800+0100",
+ "booleanAttribute" -> true,
+ "uuidAttribute" -> "ee0caf69-560b-4453-9bae-72982225e661",
+ "hashAttribute" -> "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
+ "metricAttribute" -> Json.obj("metric1" -> 1, "metric2" -> 2),
+ "user" -> "testUser",
+ "createdAt" -> "20160620T162845+0200",
+ "createdBy" -> "testUser"
+ )
+ )
+
+ "DeleteSrv.realDelete" should {
+ "remove entity if exists" in {
+ val updateSrv = mock[UpdateSrv]
+ val getSrv = mock[GetSrv]
+ val dbRemove = mock[DBRemove]
+ val eventSrv = mock[EventSrv]
+ val deleteSrv = new DeleteSrv(updateSrv, getSrv, dbRemove, eventSrv)
+
+ val id = "42"
+ getSrv[TestModel, TestEntity](model, id) returns Future.successful(entity)
+ dbRemove(entity) returns Future.successful(true)
+ deleteSrv.realDelete[TestModel, TestEntity](model, id).await must not(throwA[Exception])
+ there was one(dbRemove).apply(entity)
+ }
+
+    "returns error if entity can't be retrieved" in {
+ val updateSrv = mock[UpdateSrv]
+ val getSrv = mock[GetSrv]
+ val dbRemove = mock[DBRemove]
+ val eventSrv = mock[EventSrv]
+ val deleteSrv = new DeleteSrv(updateSrv, getSrv, dbRemove, eventSrv)
+
+ val id = "42"
+ val error = NotFoundError(s"${model.modelName} $id not found")
+ getSrv[TestModel, TestEntity](model, id) returns Future.failed(error)
+ deleteSrv.realDelete[TestModel, TestEntity](model, id).await must throwA[NotFoundError]
+ }
+
+ "returns error if entity is not found" in {
+ val updateSrv = mock[UpdateSrv]
+ val getSrv = mock[GetSrv]
+ val dbRemove = mock[DBRemove]
+ val eventSrv = mock[EventSrv]
+ val deleteSrv = new DeleteSrv(updateSrv, getSrv, dbRemove, eventSrv)
+
+ val id = "42"
+ getSrv[TestModel, TestEntity](model, id) returns Future.successful(entity)
+ dbRemove(entity) returns Future.successful(false)
+ deleteSrv.realDelete[TestModel, TestEntity](model, id).await must throwA[NotFoundError]
+ }
+ }
+
+}
diff --git a/elastic4play/test/org/elastic4play/services/FindSrvSpec.scala b/elastic4play/test/org/elastic4play/services/FindSrvSpec.scala
new file mode 100644
index 000000000..d94331c3b
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/services/FindSrvSpec.scala
@@ -0,0 +1,56 @@
+//package org.elastic4play.services
+//
+//import play.api.libs.json.Json
+//import play.api.test.PlaySpecification
+//
+//import com.sksamuel.elastic4s.ElasticDsl.{ matchAllQuery, search }
+//import com.sksamuel.elastic4s.IndexesAndTypes.apply
+//import org.junit.runner.RunWith
+//import org.specs2.mock.Mockito
+//import org.specs2.runner.JUnitRunner
+//
+//import org.elastic4play.models.BaseModelDef
+//
+//@RunWith(classOf[JUnitRunner])
+//class FindSrvSpec extends PlaySpecification with Mockito {
+//
+// val indexName = "myIndex"
+// val documentType = "myDocument"
+//
+// "GroupByCategory" should {
+// "generate correct elasticsearch query" in {
+// import org.elastic4play.services.QueryDSL._
+// val catAgg = new GroupByCategory(Map(
+// "debug" → ("level" ~= "DEBUG"),
+// "info" → ("level" ~= "INFO"),
+// "warn" → ("level" ~= "WARN")), Seq(selectCount))
+//
+// val query = search(indexName → documentType).matchAllQuery.aggregations(catAgg(mock[BaseModelDef]))
+//
+// Json.parse(query._builder.toString) must_== Json.parse("""
+// {
+// "query": {
+// "match_all": {}
+// },
+// "aggregations": {
+// "categories": {
+// "filters": {
+// "filters": {
+// "debug": { "term": { "level": "DEBUG" } },
+// "info": { "term": { "level": "INFO" } },
+// "warn": { "term": { "level": "WARN" } }
+// }
+// },
+// "aggregations": {
+// "count": {
+// "filter": {
+// "match_all": {}
+// }
+// }
+// }
+// }
+// }
+// }""")
+// }
+// }
+//}
diff --git a/elastic4play/test/org/elastic4play/services/UpdateSrvSpec.scala b/elastic4play/test/org/elastic4play/services/UpdateSrvSpec.scala
new file mode 100644
index 000000000..b83cf485b
--- /dev/null
+++ b/elastic4play/test/org/elastic4play/services/UpdateSrvSpec.scala
@@ -0,0 +1,97 @@
+package org.elastic4play.services
+
+import java.util.{Date, UUID}
+
+import org.elastic4play.controllers.JsonInputValue
+import org.elastic4play.database.DBModify
+import org.elastic4play.models.{Attribute, EntityDef, ModelDef, AttributeFormat => F}
+import org.elastic4play.utils.RichFuture
+import org.elastic4play.{AttributeCheckingError, InvalidFormatAttributeError, UnknownAttributeError}
+import org.junit.runner.RunWith
+import org.specs2.mock.Mockito
+import org.specs2.runner.JUnitRunner
+import play.api.libs.json._
+import play.api.test.PlaySpecification
+
+@RunWith(classOf[JUnitRunner])
+class UpdateSrvSpec extends PlaySpecification with Mockito {
+
+ class TestModel extends ModelDef[TestModel, TestEntity]("testModel", "TestModel", "/test") {
+ val textAttribute: Attribute[String] = attribute("textAttribute", F.textFmt, "textAttribute")
+ val stringAttribute: Attribute[String] = attribute("stringAttribute", F.stringFmt, "stringAttribute")
+ val dateAttribute: Attribute[Date] = attribute("dateAttribute", F.dateFmt, "dateAttribute")
+ val booleanAttribute: Attribute[Boolean] = attribute("booleanAttribute", F.booleanFmt, "booleanAttribute")
+ val uuidAttribute: Attribute[UUID] = attribute("uuidAttribute", F.uuidFmt, "uuidAttribute")
+ val hashAttribute: Attribute[String] = attribute("hashAttribute", F.hashFmt, "hashAttribute")
+ val metricAttribute: Attribute[JsValue] = attribute("metricAttribute", F.metricsFmt, "metricAttribute")
+ val multiAttibute: Attribute[Seq[String]] = multiAttribute("multiAttribute", F.stringFmt, "multiAttribute")
+ }
+ class TestEntity(model: TestModel, attributes: JsObject) extends EntityDef[TestModel, TestEntity](model, attributes)
+ val fieldsSrv: FieldsSrv = mock[FieldsSrv]
+ val dbModify: DBModify = mock[DBModify]
+ val eventSrv: EventSrv = mock[EventSrv]
+ val getSrv: GetSrv = mock[GetSrv]
+ val attachmentSrv: AttachmentSrv = mock[AttachmentSrv]
+ val updateSrv = new UpdateSrv(fieldsSrv, dbModify, getSrv, attachmentSrv, eventSrv)
+ val model = new TestModel
+
+ "UpdateSrv.checkAttributes" should {
+    "return attributes if they are correct" in {
+ val attrs = Json.obj(
+ "textAttribute" -> "valid text",
+ "stringAttribute" -> "valid string",
+ "dateAttribute" -> "20160128T175800+0100",
+ "booleanAttribute" -> true,
+ "uuidAttribute" -> "ee0caf69-560b-4453-9bae-72982225e661",
+ "hashAttribute" -> "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
+ "metricAttribute" -> Json.obj("metric1" -> 1, "metric2" -> 2),
+ "metricAttribute.metric3" -> 3
+ )
+
+ updateSrv.checkAttributes(attrs, model).await must_== attrs
+ }
+
+ "returns errors if attribute format is invalid" in {
+ val attrs = Json.obj(
+ "textAttribute" -> true,
+ // "stringAttribute" -> 2134,
+ "dateAttribute" -> "2016-01-28",
+ "booleanAttribute" -> "true",
+ "uuidAttribute" -> "ee0caf69560b44539bae72982225e661",
+ "hashAttribute" -> "01ba471-invalid-9c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
+ "metricAttribute" -> Json.obj("metric1" -> "blah", "metric2" -> 2),
+ "unknownAttribute" -> 1,
+ "metricAttribute.metric3" -> "aze",
+ "multiAttribute" -> "single value"
+ )
+
+ updateSrv.checkAttributes(attrs, model).await must throwA[AttributeCheckingError].like {
+ case AttributeCheckingError(_, errors) =>
+ errors must contain( //exactly[AttributeError](
+ InvalidFormatAttributeError("textAttribute", model.textAttribute.format.name, JsonInputValue(JsBoolean(true))),
+ InvalidFormatAttributeError("dateAttribute", model.dateAttribute.format.name, JsonInputValue(JsString("2016-01-28"))),
+ InvalidFormatAttributeError("booleanAttribute", model.booleanAttribute.format.name, JsonInputValue(JsString("true"))),
+ InvalidFormatAttributeError(
+ "uuidAttribute",
+ model.uuidAttribute.format.name,
+ JsonInputValue(JsString("ee0caf69560b44539bae72982225e661"))
+ ),
+ InvalidFormatAttributeError(
+ "hashAttribute",
+ model.hashAttribute.format.name,
+ JsonInputValue(JsString("01ba471-invalid-9c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b"))
+ ),
+ InvalidFormatAttributeError(
+ "metricAttribute",
+ model.metricAttribute.format.name,
+ JsonInputValue(Json.obj("metric1" -> "blah", "metric2" -> 2))
+ ),
+ UnknownAttributeError("unknownAttribute", JsNumber(1)),
+ InvalidFormatAttributeError("metricAttribute", "number", JsonInputValue(JsString("aze"))),
+ InvalidFormatAttributeError("multiAttribute", "multi-string", JsonInputValue(JsString("single value")))
+ )
+ }
+ }
+ }
+
+}
diff --git a/project/Common.scala b/project/Common.scala
index 2f4b4b9a5..27d25b0dd 100644
--- a/project/Common.scala
+++ b/project/Common.scala
@@ -13,15 +13,14 @@ object Common {
resolvers += "elasticsearch-releases" at "https://artifacts.elastic.co/maven",
scalaVersion := Dependencies.scalaVersion,
scalacOptions ++= Seq(
+ "-encoding",
+ "UTF-8",
"-deprecation", // Emit warning and location for usages of deprecated APIs.
"-feature", // Emit warning and location for usages of features that should be imported explicitly.
"-unchecked", // Enable additional warnings where generated code depends on assumptions.
//"-Xfatal-warnings", // Fail the compilation if there are any warnings.
"-Xlint", // Enable recommended additional warnings.
- "-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver.
"-Ywarn-dead-code", // Warn when dead code is identified.
- "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
- "-Ywarn-nullary-override", // Warn when non-nullary overrides nullary, e.g. def foo() over def foo.
"-Ywarn-numeric-widen" // Warn when numerics are widened.
),
Test / scalacOptions ~= { options =>
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 490bfaee0..052f12b1d 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -1,7 +1,7 @@
-import sbt.*
+import sbt._
object Dependencies {
- val scalaVersion = "2.12.16"
+ val scalaVersion = "2.13.15"
val dockerJavaVersion = "3.4.0"
object Play {
@@ -13,16 +13,14 @@ object Dependencies {
val specs2 = "com.typesafe.play" %% "play-specs2" % version
val filters = "com.typesafe.play" %% "filters-helpers" % version
val guice = "com.typesafe.play" %% "play-guice" % version
- val twirl = "com.typesafe.play" %% "twirl-api" % "1.5.2"
+ val twirl = "com.typesafe.play" %% "twirl-api" % "1.6.8"
}
- val scalaGuice = "net.codingwell" %% "scala-guice" % "5.1.1"
+ val scalaGuice = "net.codingwell" %% "scala-guice" % "6.0.0"
val reflections = "org.reflections" % "reflections" % "0.10.2"
val zip4j = "net.lingala.zip4j" % "zip4j" % "2.11.5"
- val elastic4play = "org.thehive-project" %% "elastic4play" % "1.13.6"
- val dockerClient = "com.spotify" % "docker-client" % "8.16.0"
- val dockerJavaClient = "com.github.docker-java" % "docker-java" % dockerJavaVersion
+ val dockerJavaClient = "com.github.docker-java" % "docker-java-core" % dockerJavaVersion
val dockerJavaTransport = "com.github.docker-java" % "docker-java-transport-zerodep" % dockerJavaVersion
val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % play.core.PlayVersion.akkaVersion
val akkaClusterTyped = "com.typesafe.akka" %% "akka-cluster-typed" % play.core.PlayVersion.akkaVersion
diff --git a/project/build.properties b/project/build.properties
index 40b3b8e7b..0b699c305 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.9.0
+sbt.version=1.10.2
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 758cc8da9..56641174d 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -3,7 +3,7 @@ logLevel := Level.Info
evictionErrorLevel := util.Level.Warn
// The Play plugin
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.19")
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.9.5")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
-addSbtPlugin("org.thehive-project" % "sbt-github-changelog" % "0.4.0")
-addSbtPlugin("io.github.siculo" %% "sbt-bom" % "0.3.0")
+//addSbtPlugin("org.thehive-project" % "sbt-github-changelog" % "0.4.0")
+//addSbtPlugin("io.github.siculo" %% "sbt-bom" % "0.3.0")