From a2168f19e1169452d61d754eb2af5c5fea9564ca Mon Sep 17 00:00:00 2001
From: Céline Pelletier <82821620+celinepelletier@users.noreply.github.com>
Date: Wed, 27 Nov 2024 15:03:44 -0500
Subject: [PATCH] feat: SKFP-1339 add custom mainDestination for enrich variant (#243)

---
 .../ferlab/datalake/spark3/genomics/enriched/Variants.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/datalake-spark3/src/main/scala/bio/ferlab/datalake/spark3/genomics/enriched/Variants.scala b/datalake-spark3/src/main/scala/bio/ferlab/datalake/spark3/genomics/enriched/Variants.scala
index d68f44d9..bb6d1fb0 100644
--- a/datalake-spark3/src/main/scala/bio/ferlab/datalake/spark3/genomics/enriched/Variants.scala
+++ b/datalake-spark3/src/main/scala/bio/ferlab/datalake/spark3/genomics/enriched/Variants.scala
@@ -32,8 +32,8 @@ import java.time.LocalDateTime
 case class Variants(rc: RuntimeETLContext, participantId: Column = col("participant_id"), affectedStatus: Column = col("affected_status"), filterSnv: Option[Column] = Some(col("has_alt")),
                     snvDatasetId: String, splits: Seq[OccurrenceSplit], extraAggregations: Seq[Column] = Nil,
-                    checkpoint: Boolean = false, spliceAi: Boolean = true) extends SimpleSingleETL(rc) {
-  override val mainDestination: DatasetConf = conf.getDataset("enriched_variants")
+                    checkpoint: Boolean = false, spliceAi: Boolean = true, destinationDataSetId: String = "enriched_variants") extends SimpleSingleETL(rc) {
+  override val mainDestination: DatasetConf = conf.getDataset(destinationDataSetId)
   if (checkpoint) {
     spark.sparkContext.setCheckpointDir(s"${mainDestination.rootPath}/checkpoints")
   }
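
Usage note (not part of the patch): with this change, callers can route the enriched output to a dataset other than the default "enriched_variants" by passing destinationDataSetId. A minimal sketch follows; the dataset ids "normalized_snv" and "enriched_variants_somatic", and the values context and occurrenceSplits, are illustrative assumptions, not names taken from the repository.

    // Illustrative only: dataset ids, context and splits are assumed, not from the repo.
    val job = Variants(
      rc = context,                                        // a RuntimeETLContext built elsewhere
      snvDatasetId = "normalized_snv",                     // assumed id of the SNV source dataset
      splits = occurrenceSplits,                           // assumed Seq[OccurrenceSplit]
      destinationDataSetId = "enriched_variants_somatic"   // new parameter introduced by this patch
    )
    // job.mainDestination now resolves via conf.getDataset("enriched_variants_somatic")
    // instead of the previously hard-coded conf.getDataset("enriched_variants").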