geekthanos opened this issue 6 years ago
@kolapkardhaval can you provide more details? I need the full environment info.
I'm getting this error now, too.
Here's what I'm running:
application.conf:

```hocon
training {
  postgres {
    dataSourceClass = "org.postgresql.ds.PGSimpleDataSource"
    properties {
      databaseName = "jobs"
      user = "jobs"
      password = "jobs"
    }
  }
}
```
My Tables.scala:

```scala
import java.sql.Timestamp

import com.fasterxml.jackson.databind.ObjectMapper
import com.github.tminglei.slickpg._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

// Domain types (JobType, JobPriority, TrainingJobStatus, JobParams,
// SparkJobParams, ContainerJobParams, TrainingJob, JobMetadata,
// JobPermissions) come from this project's own model and are omitted here.

trait TrainingPostgresProfile extends ExPostgresProfile with PgEnumSupport
    with PgArraySupport
    with PgDate2Support
    with PgRangeSupport
    with PgHStoreSupport
    with PgSearchSupport
    with PgNetSupport
    with PgLTreeSupport {

  override val api = TrainingDbApi

  trait TrainingEnumImplicits {
    implicit val jobTypeMapper = createEnumJdbcType("jobtype", JobType)
    implicit val jobTypeListTypeMapper = createEnumListJdbcType("jobtype", JobType)
    implicit val jobTypeColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder(JobType)
    implicit val jobTypeOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder(JobType)

    implicit val jobStatusMapper = createEnumJdbcType("training_status", TrainingJobStatus)
    implicit val jobStatusListTypeMapper = createEnumListJdbcType("training_status", TrainingJobStatus)
    implicit val jobStatusColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder(TrainingJobStatus)
    implicit val jobStatusOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder(TrainingJobStatus)

    implicit val jobPriorityMapper = createEnumJdbcType("priority", JobPriority)
    implicit val jobPriorityListTypeMapper = createEnumListJdbcType("priority", JobPriority)
    implicit val jobPriorityColumnExtensionMethodsBuilder = createEnumColumnExtensionMethodsBuilder(JobPriority)
    implicit val jobPriorityOptionColumnExtensionMethodsBuilder = createEnumOptionColumnExtensionMethodsBuilder(JobPriority)
  }

  object TrainingDbApi extends API with ArrayImplicits
      with DateTimeImplicits
      with NetImplicits
      with LTreeImplicits
      with RangeImplicits
      with HStoreImplicits
      with SearchImplicits
      with SearchAssistants
      with TrainingEnumImplicits {
    implicit val strListTypeMapper = new SimpleArrayJdbcType[String]("text").to(_.toList)
  }
}

object TrainingPostgresProfile extends TrainingPostgresProfile

/** Stand-alone Slick data model for immediate use */
/** Slick data model trait for extension, choice of backend or usage in the cake pattern. (Make sure to initialize this late.) */
trait TrainingTableAccess {
  val profile: TrainingPostgresProfile = TrainingPostgresProfile
  import profile.api._

  val db = Database.forConfig("training.postgres")

  case class TrainingJobsRow(requestId: String,
                             requester: String,
                             callback: String,
                             jobType: JobType,
                             jobParams: String,
                             status: TrainingJobStatus,
                             priority: JobPriority,
                             viewPermissions: List[String],
                             killPermissions: List[String],
                             lastUpdated: Timestamp)

  /** Collection-like TableQuery object for table training_jobs */
  val jobsTable = TableQuery[Training]

  class Training(tag: Tag) extends Table[TrainingJobsRow](tag, "training_jobs") {
    val requestId: Rep[String] = column[String]("request_id", O.PrimaryKey)
    val requester: Rep[String] = column[String]("requester")
    val callback: Rep[String] = column[String]("callback")
    val jobParams: Rep[String] = column[String]("job_params")
    val jobType: Rep[JobType] = column[JobType]("job_type")
    val status: Rep[TrainingJobStatus] = column[TrainingJobStatus]("status")
    val priority: Rep[JobPriority] = column[JobPriority]("priority")
    val viewPermissions: Rep[List[String]] = column[List[String]]("view_permissions")
    val killPermissions: Rep[List[String]] = column[List[String]]("kill_permissions")
    /** Database column last_updated SqlType(timestamp) */
    val lastUpdated: Rep[java.sql.Timestamp] = column[java.sql.Timestamp]("last_updated")

    def * = (requestId, requester, callback, jobType, jobParams, status, priority, viewPermissions, killPermissions, lastUpdated) <> (TrainingJobsRow.tupled, TrainingJobsRow.unapply)
  }

  def jobsRowToJob(jobRow: TrainingJobsRow)(implicit objectMapper: ObjectMapper): TrainingJob = {
    val params: JobParams = jobRow.jobType match {
      case JobType.SPARK => objectMapper.readValue(jobRow.jobParams, classOf[SparkJobParams])
      case JobType.CONTAINER => objectMapper.readValue(jobRow.jobParams, classOf[ContainerJobParams])
    }
    TrainingJob(
      requestId = jobRow.requestId,
      requester = jobRow.requester,
      callbackUrl = jobRow.callback,
      jobParams = params,
      jobType = jobRow.jobType,
      status = jobRow.status,
      lastUpdate = jobRow.lastUpdated,
      metadata = JobMetadata(
        permissions = JobPermissions(
          view = jobRow.viewPermissions.toSet,
          kill = jobRow.killPermissions.toSet),
        priority = jobRow.priority
      )
    )
  }

  def jobToJobsRow(job: TrainingJob)(implicit objectMapper: ObjectMapper): TrainingJobsRow = TrainingJobsRow(
    requester = job.requester,
    requestId = job.requestId,
    callback = job.callbackUrl,
    jobParams = objectMapper.writeValueAsString(job.jobParams),
    jobType = job.jobType,
    status = job.status,
    lastUpdated = job.lastUpdate,
    priority = job.metadata.priority,
    viewPermissions = job.metadata.permissions.view.toList,
    killPermissions = job.metadata.permissions.kill.toList
  )

  def findRowByRequestId(reqId: String): Future[Option[TrainingJobsRow]] = db.run(
    jobsTable.filter(_.requestId === reqId)
      .result
      .headOption
  )

  def findByRequestId(reqId: String)(implicit objectMapper: ObjectMapper): Future[Option[TrainingJob]] =
    findRowByRequestId(reqId) map {
      case Some(row: TrainingJobsRow) => Some(jobsRowToJob(row))
      case None => None
    }
}
```
I get this error when I do an insert.
I have very similar code in another project that does not get this error. I'm perplexed.
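For reference, the failing call is a plain Slick insert, roughly like this (a minimal sketch; `InsertExample` and the row value are placeholders, not the actual project code):

```scala
object InsertExample extends TrainingTableAccess {
  import profile.api._

  // The NoSuchMethodError surfaces while slick-pg binds the List[String]
  // array columns (view_permissions / kill_permissions) of the new row.
  def insert(row: TrainingJobsRow) = db.run(jobsTable += row)
}
```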
Slick version 3.2.3; the other relevant dependencies:

```xml
<dependencies>
  <dependency>
    <groupId>com.h2database</groupId>
    <artifactId>h2</artifactId>
  </dependency>
  <dependency>
    <groupId>org.postgresql</groupId>
    <artifactId>postgresql</artifactId>
    <version>42.2.2</version>
  </dependency>
  <dependency>
    <groupId>com.github.tminglei</groupId>
    <artifactId>slick-pg_${scala.compat.version}</artifactId>
    <version>0.16.1</version>
  </dependency>
  <dependency>
    <groupId>com.github.tminglei</groupId>
    <artifactId>slick-pg_joda-time_${scala.compat.version}</artifactId>
    <version>0.16.1</version>
  </dependency>
</dependencies>
```

Scala 2.11.
Well, I'm also confused; I can't find any obvious issues in it. Please give me a reproducible sample project so I can dig into it.
So, I wasn't able to reproduce it in a separate repository without including proprietary stuff, but here's what happens: the code works inside the persistence module itself, while other modules that depend on that module get this error. The persistence module (modeling-service-persistence) depends on org.postgresql:postgresql 42.2.2, yet any module that depends on it somehow ends up resolving driver version 9.4.1212 instead, presumably because Maven's "nearest wins" dependency mediation picks the old driver up from some other transitive path. Totally stupid.
By adding an exclusion for org.postgresql:postgresql on the modeling-service-persistence dependency, and explicitly adding a 42.2.2 dependency to the server module (which is stupid and shouldn't have to be done), I can fix the problem.
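In pom.xml terms the workaround looks roughly like this (a sketch; the coordinates follow the dependency:tree output below, so adjust them to your own build):

```xml
<!-- In the server module: exclude the old transitive driver... -->
<dependency>
  <groupId>com.mycompany</groupId>
  <artifactId>modeling-service-persistence</artifactId>
  <version>1.0-SNAPSHOT</version>
  <exclusions>
    <exclusion>
      <groupId>org.postgresql</groupId>
      <artifactId>postgresql</artifactId>
    </exclusion>
  </exclusions>
</dependency>
<!-- ...and pin the driver version that slick-pg 0.16.x needs. -->
<dependency>
  <groupId>org.postgresql</groupId>
  <artifactId>postgresql</artifactId>
  <version>42.2.2</version>
</dependency>
```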
Here's the `mvn dependency:tree` output:

```
[INFO] com.mycompany:modeling-service-server:jar:1.0-SNAPSHOT
[INFO] \- com.mycompany:modeling-service-persistence:jar:1.0-SNAPSHOT:compile
[INFO]    \- org.postgresql:postgresql:jar:9.4.1212.jre7:compile
```
@monkeymantra yes, that's it, you resolved my confusion. 👍
@tminglei, I am getting the error below with 0.16.1:
```
Exception in thread "db-17" java.lang.NoSuchMethodError: org.postgresql.jdbc.PgConnection.createArrayOf(Ljava/lang/String;Ljava/lang/Object;)Ljava/sql/Array;
	at com.github.tminglei.slickpg.array.PgArrayJdbcTypes$SimpleArrayJdbcType.mkArray(PgArrayJdbcTypes.scala:42)
	at com.github.tminglei.slickpg.array.PgArrayJdbcTypes$SimpleArrayJdbcType.setValue(PgArrayJdbcTypes.scala:31)
	at com.github.tminglei.slickpg.array.PgArrayJdbcTypes$SimpleArrayJdbcType.setValue(PgArrayJdbcTypes.scala:13)
	at com.github.tminglei.slickpg.array.PgArrayJdbcTypes$WrappedConvArrayJdbcType.setValue(PgArrayJdbcTypes.scala:107)
	at com.github.tminglei.slickpg.array.PgArrayJdbcTypes$WrappedConvArrayJdbcType.setValue(PgArrayJdbcTypes.scala:98)
	at slick.jdbc.BaseResultConverter.set(JdbcResultConverter.scala:16)
	at slick.jdbc.BaseResultConverter.set(JdbcResultConverter.scala:8)
	at slick.relational.ProductResultConverter.set(ResultConverter.scala:68)
	at slick.relational.ProductResultConverter.set(ResultConverter.scala:43)
	at slick.relational.TypeMappingResultConverter.set(ResultConverter.scala:135)
	at slick.jdbc.JdbcActionComponent$InsertActionComposerImpl$SingleInsertAction.$anonfun$run$11(JdbcActionComponent.scala:509)
	at slick.jdbc.JdbcBackend$SessionDef.withPreparedInsertStatement(JdbcBackend.scala:393)
	at slick.jdbc.JdbcBackend$SessionDef.withPreparedInsertStatement$(JdbcBackend.scala:390)
	at slick.jdbc.JdbcBackend$BaseSession.withPreparedInsertStatement(JdbcBackend.scala:448)
	at slick.jdbc.JdbcActionComponent$ReturningInsertActionComposerImpl.preparedInsert(JdbcActionComponent.scala:650)
	at slick.jdbc.JdbcActionComponent$InsertActionComposerImpl$SingleInsertAction.run(JdbcActionComponent.scala:507)
	at slick.jdbc.JdbcActionComponent$SimpleJdbcProfileAction.run(JdbcActionComponent.scala:30)
	at slick.jdbc.JdbcActionComponent$SimpleJdbcProfileAction.run(JdbcActionComponent.scala:27)
	at slick.basic.BasicBackend$DatabaseDef$$anon$2.liftedTree1$1(BasicBackend.scala:275)
	at slick.basic.BasicBackend$DatabaseDef$$anon$2.run(BasicBackend.scala:275)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
```
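That stack trace points at the same driver-version conflict described above: the `createArrayOf(String, Object)` overload that slick-pg 0.16.x calls (note the single `Object` parameter, not `Object[]`) only exists in the newer pgjdbc drivers, so a `NoSuchMethodError` here almost always means an old `org.postgresql:postgresql` (such as 9.4.x) won dependency resolution. The Maven exclusion above is one fix; if the build happens to be sbt instead, a minimal sketch of the equivalent pin:

```scala
// build.sbt (sketch): force a pgjdbc driver new enough to provide
// PgConnection.createArrayOf(String, Object), which slick-pg 0.16.x
// uses when binding array columns.
dependencyOverrides += "org.postgresql" % "postgresql" % "42.2.2"
```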