Skip to content
This repository has been archived by the owner on Sep 2, 2022. It is now read-only.

Commit

Permalink
Merge pull request #2215 from graphcool/DeployWarnings
Browse files Browse the repository at this point in the history
Deploy should issue warnings on dangerous schema changes
  • Loading branch information
dpetrick authored Apr 17, 2018
2 parents 69f6771 + f2c5706 commit 8ff3fe0
Show file tree
Hide file tree
Showing 68 changed files with 3,283 additions and 451 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -45,18 +45,17 @@ object SlickExtensions {
unwrapSome(param) match {
case param: String => sql"$param"
case param: PlayJsValue => sql"${param.toString}"
// case param: SprayJsValue => sql"${param.compactPrint}"
case param: Boolean => sql"$param"
case param: Int => sql"$param"
case param: Long => sql"$param"
case param: Float => sql"$param"
case param: Double => sql"$param"
case param: BigInt => sql"#${param.toString}"
case param: BigDecimal => sql"#${param.toString}"
case param: DateTime => sql"${param.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS").withZoneUTC())}"
case None => sql"NULL"
case null => sql"NULL"
case _ => throw new IllegalArgumentException("Unsupported scalar value in SlickExtensions: " + param.toString)
case param: Boolean => sql"$param"
case param: Int => sql"$param"
case param: Long => sql"$param"
case param: Float => sql"$param"
case param: Double => sql"$param"
case param: BigInt => sql"#${param.toString}"
case param: BigDecimal => sql"#${param.toString}"
case param: DateTime => sql"${param.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS").withZoneUTC())}"
case None => sql"NULL"
case null => sql"NULL"
case _ => throw new IllegalArgumentException("Unsupported scalar value in SlickExtensions: " + param.toString)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,7 @@ case class MySqlDeployConnector(clientDatabase: Database)(implicit ec: Execution

override def getAllDatabaseSizes(): Future[Vector[DatabaseSize]] = {
val action = {
val query = sql"""
SELECT table_schema, sum( data_length + index_length) / 1024 / 1024 FROM information_schema.TABLES GROUP BY table_schema
"""
val query = sql"""SELECT table_schema, sum( data_length + index_length) / 1024 / 1024 FROM information_schema.TABLES GROUP BY table_schema"""
query.as[(String, Double)].map { tuples =>
tuples.map { tuple =>
DatabaseSize(tuple._1, tuple._2)
Expand All @@ -52,7 +50,7 @@ case class MySqlDeployConnector(clientDatabase: Database)(implicit ec: Execution
internalDatabaseRoot.run(action)
}

override def reset(): Future[Unit] = truncateTablesInDatabse(internalDatabase)
// Resets the connector by truncating every table in the internal management database.
override def reset(): Future[Unit] = truncateTablesInDatabase(internalDatabase)

override def shutdown() = {
for {
Expand All @@ -65,7 +63,7 @@ case class MySqlDeployConnector(clientDatabase: Database)(implicit ec: Execution
trait TableTruncationHelpers {
// copied from InternalTestDatabase

protected def truncateTablesInDatabse(database: Database)(implicit ec: ExecutionContext): Future[Unit] = {
protected def truncateTablesInDatabase(database: Database)(implicit ec: ExecutionContext): Future[Unit] = {
for {
schemas <- database.run(getTables("graphcool"))
_ <- database.run(dangerouslyTruncateTables(schemas))
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package com.prisma.deploy.connector.mysql.database

import com.prisma.shared.models.Field
import slick.jdbc.SQLActionBuilder
import com.prisma.shared.models.{Field, Model}
import slick.jdbc.{PositionedParameters, SQLActionBuilder}
import slick.jdbc.MySQLProfile.api._

object DatabaseQueryBuilder {
Expand All @@ -10,8 +10,8 @@ object DatabaseQueryBuilder {
sql"select exists (select `id` from `#$projectId`.`#$modelName`)"
}

def existsByRelation(projectId: String, relationId: String): SQLActionBuilder = {
sql"select exists (select `id` from `#$projectId`.`#$relationId`)"
// Returns a query selecting 1 iff the relation table contains at least one row.
// `#$` splices identifiers verbatim (no escaping, no parameter binding) —
// both arguments must be trusted internal names, never user input.
def existsByRelation(projectId: String, relationTableName: String): SQLActionBuilder = {
sql"select exists (select `id` from `#$projectId`.`#$relationTableName`)"
}

def existsNullByModelAndScalarField(projectId: String, modelName: String, fieldName: String) = {
Expand All @@ -20,12 +20,60 @@ object DatabaseQueryBuilder {
}

def existsNullByModelAndRelationField(projectId: String, modelName: String, field: Field) = {
val relationId = field.relation.get.relationTableName
val relationSide = field.relationSide.get.toString
val relationTableName = field.relation.get.relationTableName
val relationSide = field.relationSide.get.toString

sql"""select EXISTS (
select `id`from `#$projectId`.`#$modelName`
where `id` Not IN
(Select `#$projectId`.`#$relationId`.#$relationSide from `#$projectId`.`#$relationId`)
(Select `#$projectId`.`#$relationTableName`.#$relationSide from `#$projectId`.`#$relationTableName`)
)"""
}

// Builds a query returning 1 iff `value` of enum `enumName` is referenced by any row
// of any field typed with that enum across the given models (used for deploy warnings).
def enumValueIsInUse(projectId: String, models: Vector[Model], enumName: String, value: String) = {

// Collect (id column, table name, value column, value) for every field typed with this enum.
// List fields live in a separate `<model>_<field>` table keyed by `nodeId`;
// non-list fields are columns on the model table itself.
val nameTuples = for {
model <- models
field <- model.fields
if field.enum.isDefined && field.enum.get.name == enumName
} yield {
if (field.isList) ("nodeId", s"${model.name}_${field.name}", "value", value) else ("id", model.name, field.name, value)
}

// One EXISTS sub-select per matching field; `#$` splices identifiers verbatim, `$` binds the value.
// NOTE(review): the alias is spelled "existanceCheck" (sic) — kept as-is, the outer query references it.
val checks: Vector[SQLActionBuilder] = nameTuples.map { tuple =>
sql"""(Select Exists (
Select `#${tuple._1}`
From `#$projectId`.`#${tuple._2}`
Where `#${tuple._3}` = ${tuple._4}) as existanceCheck)"""
}

// UNION the per-field checks; None when no field uses the enum.
val unionized = combineBy(checks, "Union")

// NOTE(review): when `unionized` is None, `concat(Option)` is a no-op and the FROM clause
// is empty — invalid SQL; callers appear to recover from the resulting error. Confirm intended.
sql"""Select Exists (
Select existanceCheck
From(""" concat unionized concat sql""") as combined
Where existanceCheck = 1)"""
}

// Folds a collection of SQL fragments into one, interposing the given combinator
// keyword (e.g. "Union") between consecutive fragments.
// Returns None for an empty collection so callers can omit the clause entirely.
def combineBy(actions: Iterable[SQLActionBuilder], combinator: String): Option[SQLActionBuilder] =
  actions.reduceLeftOption { (acc, next) =>
    // `concat` inserts a single space between fragments; `#$` splices the keyword verbatim.
    acc concat sql"#$combinator" concat next
  }

// Adds `concat`/`++` combinators to Slick's SQLActionBuilder so query fragments can be spliced together.
implicit class SQLActionBuilderConcat(val a: SQLActionBuilder) extends AnyVal {
// Concatenates two fragments, inserting a separator between their query parts and
// chaining their parameter converters so bound values keep their relative positions.
// NOTE(review): `++ " "` appends the String as a sequence of chars (a single ' ' element here);
// it appears to render correctly via toString, but `++ Seq(" ")` would be clearer — confirm intent.
def concat(b: SQLActionBuilder): SQLActionBuilder = {
SQLActionBuilder(a.queryParts ++ " " ++ b.queryParts, (p: Unit, pp: PositionedParameters) => {
a.unitPConv.apply(p, pp)
b.unitPConv.apply(p, pp)
})
}
// Optional-aware concat: a None fragment is a no-op (returns `a` unchanged).
def concat(b: Option[SQLActionBuilder]): SQLActionBuilder = b match {
case Some(b) => a concat b
case None => a
}

// Symbolic aliases for the two `concat` overloads.
def ++(b: SQLActionBuilder): SQLActionBuilder = concat(b)
def ++(b: Option[SQLActionBuilder]): SQLActionBuilder = concat(b)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,16 @@ case class ClientDbQueriesImpl(project: Project, clientDatabase: Database)(impli
clientDatabase.run(readOnlyBoolean(query)).map(_.head).recover { case _: java.sql.SQLSyntaxErrorException => false }
}

def existsNullByModelAndScalarField(model: Model, field: Field): Future[Boolean] = {
val query = DatabaseQueryBuilder.existsNullByModelAndScalarField(project.id, model.name, field.name)
// Checks whether any node of `model` has a null value for `field`.
// Scalar fields are checked directly on the model table; relation fields are checked
// via the relation table (a node with no matching relation row counts as null).
// A SQL syntax error (e.g. the backing table does not exist yet) is treated as "no nulls".
def existsNullByModelAndField(model: Model, field: Field): Future[Boolean] = {
  val query =
    if (field.isScalar) DatabaseQueryBuilder.existsNullByModelAndScalarField(project.id, model.name, field.name)
    else DatabaseQueryBuilder.existsNullByModelAndRelationField(project.id, model.name, field)

  clientDatabase
    .run(readOnlyBoolean(query))
    .map(_.head)
    .recover { case _: java.sql.SQLSyntaxErrorException => false }
}

def existsNullByModelAndRelationField(model: Model, field: Field): Future[Boolean] = {
val query = DatabaseQueryBuilder.existsNullByModelAndRelationField(project.id, model.name, field)
// Checks whether `value` of enum `enumName` is still referenced by any row of the given models.
// A SQL syntax error (e.g. a backing table is missing) is treated as "not in use".
override def enumValueIsInUse(models: Vector[Model], enumName: String, value: String): Future[Boolean] = {
  val action = readOnlyBoolean(DatabaseQueryBuilder.enumValueIsInUse(project.id, models, enumName, value))
  clientDatabase
    .run(action)
    .map(_.head)
    .recover { case _: java.sql.SQLSyntaxErrorException => false }
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ trait SpecBase extends BeforeAndAfterEach with BeforeAndAfterAll with AwaitUtils
val projectId = name + "@" + stage
val project = newTestProject(projectId)
projectPersistence.create(project).await()

val migration = Migration.empty(project.id)
val result = migrationPersistence.create(migration).await()
migrationPersistence.updateMigrationStatus(result.id, MigrationStatus.Success).await()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,6 @@ case class DatabaseSize(name: String, total: Double)
trait ClientDbQueries {
def existsByModel(modelName: String): Future[Boolean]
def existsByRelation(relationId: String): Future[Boolean]
def existsNullByModelAndScalarField(model: Model, field: Field): Future[Boolean]
def existsNullByModelAndRelationField(model: Model, field: Field): Future[Boolean]
def existsNullByModelAndField(model: Model, field: Field): Future[Boolean]
def enumValueIsInUse(models: Vector[Model], enumName: String, value: String): Future[Boolean]
}
Original file line number Diff line number Diff line change
Expand Up @@ -51,15 +51,17 @@ case class MigrationStepMapperImpl(projectId: String) extends MigrationStepMappe
lazy val deleteScalarListTable = DeleteScalarListTable(projectId, model.name, previous.name, previous.typeIdentifier)

() match {
case _ if previous.isRelation && next.isRelation => Vector.empty
case _ if previous.isRelation && next.isScalarNonList => Vector(createColumn)
case _ if previous.isRelation && next.isScalarList => Vector(createScalarListTable)
case _ if previous.isScalarList && next.isScalarList => Vector(updateScalarListTable)
case _ if previous.isScalarList && next.isScalarNonList => Vector(createColumn, deleteScalarListTable)
case _ if previous.isScalarList && next.isRelation => Vector(deleteScalarListTable)
case _ if previous.isScalarNonList && next.isScalarNonList => Vector(updateColumn)
case _ if previous.isScalarNonList && next.isScalarList => Vector(createScalarListTable, deleteColumn)
case _ if previous.isScalarNonList && next.isRelation => Vector(deleteColumn)
case _ if previous.isRelation && next.isRelation => Vector.empty
case _ if previous.isRelation && next.isScalarNonList => Vector(createColumn)
case _ if previous.isRelation && next.isScalarList => Vector(createScalarListTable)
case _ if previous.isScalarList && next.isScalarNonList => Vector(createColumn, deleteScalarListTable)
case _ if previous.isScalarList && next.isRelation => Vector(deleteScalarListTable)
case _ if previous.isScalarNonList && next.isScalarList => Vector(createScalarListTable, deleteColumn)
case _ if previous.isScalarNonList && next.isRelation => Vector(deleteColumn)
case _ if previous.isScalarNonList && next.isScalarNonList && previous.typeIdentifier == next.typeIdentifier => Vector(updateColumn)
case _ if previous.isScalarList && next.isScalarList && previous.typeIdentifier == next.typeIdentifier => Vector(updateScalarListTable)
case _ if previous.isScalarNonList && next.isScalarNonList => Vector(deleteColumn, createColumn)
case _ if previous.isScalarList && next.isScalarList => Vector(deleteScalarListTable, createScalarListTable)
}

case x: CreateRelation =>
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.prisma.api.connector.DataResolver
import com.prisma.api.util.StringMatchers
import com.prisma.api.{ApiDependenciesForTest, ApiTestDatabase, ApiTestServer}
import com.prisma.api.{ApiDependenciesForTest, ApiTestServer}
import com.prisma.deploy.specutils.{DeployTestDependencies, DeployTestServer}
import com.prisma.shared.models.{Migration, Project}
import com.prisma.utils.await.AwaitUtils
Expand All @@ -22,7 +22,7 @@ trait IntegrationBaseSpec extends BeforeAndAfterEach with BeforeAndAfterAll with

override protected def afterAll(): Unit = {
super.afterAll()
deployTestDependencies.deployPersistencePlugin.shutdown().await()
deployTestDependencies.deployPersistencePlugin.shutdown().await
apiTestDependencies.destroy
}

Expand All @@ -32,7 +32,6 @@ trait IntegrationBaseSpec extends BeforeAndAfterEach with BeforeAndAfterAll with

implicit lazy val apiTestDependencies = new ApiDependenciesForTest
val apiServer = ApiTestServer()
val apiDatabase = ApiTestDatabase()

def dataResolver(project: Project): DataResolver = apiTestDependencies.dataResolver(project)

Expand All @@ -42,6 +41,7 @@ trait IntegrationBaseSpec extends BeforeAndAfterEach with BeforeAndAfterAll with

val deployServer = DeployTestServer()
val projectsToCleanUp = new ArrayBuffer[String]
val internalDB = deployTestDependencies.deployPersistencePlugin

val basicTypesGql =
"""
Expand All @@ -57,7 +57,7 @@ trait IntegrationBaseSpec extends BeforeAndAfterEach with BeforeAndAfterAll with

override protected def beforeEach(): Unit = {
super.beforeEach()
deployTestDependencies.deployPersistencePlugin.reset().await
projectsToCleanUp.foreach(id => internalDB.deleteProjectDatabase(id).await)
projectsToCleanUp.clear()
}

Expand All @@ -69,7 +69,7 @@ trait IntegrationBaseSpec extends BeforeAndAfterEach with BeforeAndAfterAll with
): (Project, Migration) = {

val projectId = name + "@" + stage
projectsToCleanUp :+ projectId
projectsToCleanUp += projectId
deployServer.addProject(name, stage)
deployServer.deploySchema(name, stage, schema.stripMargin, secrets)
}
Expand Down
Loading

0 comments on commit 8ff3fe0

Please sign in to comment.