From d28c97704c9323f23cce52f58529e6fff59f6cbb Mon Sep 17 00:00:00 2001
From: Sergey Nazarov
Date: Mon, 13 Jan 2025 21:33:39 +0300
Subject: [PATCH] Bumped dependencies (#3981)

---
 .github/workflows/push-default-branch.yml      |  8 +-
 build.sbt                                      |  2 +-
 .../com/wavesplatform/lang/FileCompiler.scala  |  6 +-
 .../v1/evaluator/ctx/impl/CryptoContext.scala  |  7 +-
 .../wavesplatform/ResponsivenessLogs.scala     | 99 +++++++++----------
 .../api/common/CommonAccountsApi.scala         |  4 +-
 .../database/RocksDBWriter.scala               |  4 +-
 .../com/wavesplatform/database/package.scala   |  4 +-
 .../com/wavesplatform/network/Handshake.scala  | 12 +--
 .../com/wavesplatform/utils/package.scala      |  6 +-
 project/Dependencies.scala                     | 30 +++++---
 project/build.properties                       |  2 +-
 project/plugins.sbt                            |  2 +-
 13 files changed, 94 insertions(+), 92 deletions(-)

diff --git a/.github/workflows/push-default-branch.yml b/.github/workflows/push-default-branch.yml
index b66983c2cc6..1bc039564a2 100644
--- a/.github/workflows/push-default-branch.yml
+++ b/.github/workflows/push-default-branch.yml
@@ -11,6 +11,12 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: 'temurin'
+          java-version: '11'
+          cache: 'sbt'
+      - uses: sbt/setup-sbt@v1
       - uses: scalacenter/sbt-dependency-submission@v3
         with:
           configs-ignore: test compile-internal provided
@@ -33,5 +39,5 @@ jobs:
           SONATYPE_USERNAME: ${{ secrets.OSSRH_USERNAME }}
           PGP_PASSPHRASE: ${{ secrets.OSSRH_GPG_PASSPHRASE }}
         run: |
-          version=$(echo ${{ github.ref }} | awk -F '[-.]' '{print $2"."$3}')-SNAPSHOT
+          version=$(awk -F '[".]' '{print $3"."$4"."$5+1}' version.sbt)-SNAPSHOT
           sbt -Dproject.version=$version --mem 4096 --batch publishSigned
diff --git a/build.sbt b/build.sbt
index 3d597af7d6b..5fbff50884c 100644
--- a/build.sbt
+++ b/build.sbt
@@ -149,7 +149,7 @@ lazy val `waves-node` = (project in file("."))
 
 inScope(Global)(
   Seq(
-    scalaVersion := "2.13.15",
+    scalaVersion := "2.13.16",
     organization := "com.wavesplatform",
     organizationName := "Waves Platform",
     organizationHomepage := Some(url("https://wavesplatform.com")),
diff --git a/lang/jvm/src/main/scala/com/wavesplatform/lang/FileCompiler.scala b/lang/jvm/src/main/scala/com/wavesplatform/lang/FileCompiler.scala
index 70528554f8d..99469ac43b6 100644
--- a/lang/jvm/src/main/scala/com/wavesplatform/lang/FileCompiler.scala
+++ b/lang/jvm/src/main/scala/com/wavesplatform/lang/FileCompiler.scala
@@ -1,10 +1,10 @@
 package com.wavesplatform.lang
 
-import com.google.common.base.Charsets
 import com.google.common.io
 import com.wavesplatform.lang.v1.estimator.v3.ScriptEstimatorV3
 
 import java.io.File
+import java.nio.charset.StandardCharsets
 
 object FileCompiler extends App {
   private val estimator = ScriptEstimatorV3.latest
@@ -19,12 +19,12 @@ object FileCompiler extends App {
         pathname.isFile && pathname.getAbsoluteFile != scriptFile
       })
       .map { f =>
-        f.getName -> io.Files.asCharSource(f, Charsets.UTF_8).read()
+        f.getName -> io.Files.asCharSource(f, StandardCharsets.UTF_8).read()
       }
       .toMap
 
     API
-      .compile(io.Files.asCharSource(scriptFile, Charsets.UTF_8).read(), estimator, libraries = imports)
+      .compile(io.Files.asCharSource(scriptFile, StandardCharsets.UTF_8).read(), estimator, libraries = imports)
       .fold(
         error => throw new RuntimeException(s"$error while compiling $path"),
         _ => println(s"successfully compiled $path")
diff --git a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/impl/CryptoContext.scala b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/impl/CryptoContext.scala
index 58dc4f8030b..bc73e0724fc 100644
--- a/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/impl/CryptoContext.scala
+++ b/lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/ctx/impl/CryptoContext.scala
@@ -4,6 +4,7 @@ import cats.implicits.*
 import cats.{Id, Monad}
 import com.wavesplatform.common.merkle.Merkle.createRoot
 import com.wavesplatform.common.state.ByteStr
+import com.wavesplatform.lang.*
 import com.wavesplatform.lang.directives.values.*
 import com.wavesplatform.lang.v1.compiler.Terms.*
 import com.wavesplatform.lang.v1.compiler.Types.*
@@ -14,7 +15,6 @@ import com.wavesplatform.lang.v1.evaluator.FunctionIds.*
 import com.wavesplatform.lang.v1.evaluator.ctx.impl.crypto.RSA.DigestAlgorithm
 import com.wavesplatform.lang.v1.evaluator.ctx.{BaseFunction, EvaluationContext, NativeFunction}
 import com.wavesplatform.lang.v1.{BaseGlobal, CTX}
-import com.wavesplatform.lang.*
 
 import scala.collection.mutable
 import scala.util.Try
@@ -50,7 +50,7 @@ object CryptoContext {
     }
   )
 
-  private val ctxCache = mutable.AnyRefMap.empty[(BaseGlobal, StdLibVersion), CTX[NoContext]]
+  private val ctxCache = mutable.HashMap.empty[(BaseGlobal, StdLibVersion), CTX[NoContext]]
 
   private def buildNew(global: BaseGlobal, version: StdLibVersion): CTX[NoContext] = {
     def functionFamily(
@@ -373,8 +373,7 @@ object CryptoContext {
       case xs @ ARR(proof) :: CONST_BYTESTR(value) :: CONST_LONG(index) :: Nil =>
         val sizeCheckedProofs = proof.collect { case bs @ CONST_BYTESTR(v) if v.size == 32 => bs }
         if (value.size == 32 && proof.length <= 16 && sizeCheckedProofs.size == proof.size) {
-          Try(createRoot(value.arr, Math.toIntExact(index), sizeCheckedProofs.reverse.map(_.bs.arr)))
-            .toEither
+          Try(createRoot(value.arr, Math.toIntExact(index), sizeCheckedProofs.reverse.map(_.bs.arr))).toEither
             .leftMap(e => ThrownError(if (e.getMessage != null) e.getMessage else "error"))
             .flatMap(r => CONST_BYTESTR(ByteStr(r)))
         } else {
diff --git a/node/src/main/scala/com/wavesplatform/ResponsivenessLogs.scala b/node/src/main/scala/com/wavesplatform/ResponsivenessLogs.scala
index 790c6050eac..9c8e476bb91 100644
--- a/node/src/main/scala/com/wavesplatform/ResponsivenessLogs.scala
+++ b/node/src/main/scala/com/wavesplatform/ResponsivenessLogs.scala
@@ -1,26 +1,25 @@
 package com.wavesplatform
 
-import java.io.{FileOutputStream, PrintWriter}
-import java.time.LocalDate
-import java.time.format.DateTimeFormatter
-
-import scala.collection.mutable
-import scala.concurrent.duration.Duration
-import scala.util.Try
-
 import com.wavesplatform.account.Address
 import com.wavesplatform.common.state.ByteStr
 import com.wavesplatform.lang.ValidationError
 import com.wavesplatform.metrics.Metrics
-import com.wavesplatform.transaction.{AuthorizedTransaction, Transaction, TxValidationError}
 import com.wavesplatform.transaction.smart.InvokeScriptTransaction
+import com.wavesplatform.transaction.{AuthorizedTransaction, Transaction, TxValidationError}
 import com.wavesplatform.utils.ScorexLogging
 import org.influxdb.dto.Point
 
+import java.io.{FileOutputStream, PrintWriter}
+import java.time.LocalDate
+import java.time.format.DateTimeFormatter
+import scala.collection.mutable
+import scala.concurrent.duration.Duration
+import scala.util.Try
+
 private class ResponsivenessLogs(csvPrefix: String, metricName: String) extends ScorexLogging {
   import ResponsivenessLogs.TxEvent
 
-  //noinspection ScalaStyle
+  // noinspection ScalaStyle
   private[this] case class MetricSnapshot(point: Point.Builder = null, nano: Long = System.nanoTime(), millis: Long = System.currentTimeMillis())
 
   private[this] case class TxState(
@@ -32,7 +31,7 @@ private class ResponsivenessLogs(csvPrefix: String, metricName: String) extends
       miningAttempt: Int,
       height: Int
   )
-  private[this] val stateMap = mutable.AnyRefMap.empty[ByteStr, TxState]
+  private[this] val stateMap = mutable.HashMap.empty[ByteStr, TxState]
 
   def writeEvent(
       height: Int,
@@ -76,48 +75,46 @@ private class ResponsivenessLogs(csvPrefix: String, metricName: String) extends
         .addField("height", height)
 
       if (eventType == TxEvent.Mined) {
-        stateMap.get(tx.id()).foreach {
-          case TxState(received, lastReceived, firstMined, _, _, attempt, _) =>
-            val delta = toMillis(nowNanos - received)
-            val lastDelta = toMillis(nowNanos - lastReceived)
-            log.trace(s"Neutrino mining time for ${tx.id()} (attempt #$attempt): $delta ms ($lastDelta from last recv)")
-
-            val snapshot = MetricSnapshot(basePoint.addField("time-to-mine", delta).addField("time-to-last-mine", lastDelta), nowNanos)
-            stateMap(tx.id()) = TxState(
-              received,
-              lastReceived,
-              firstMined.orElse(Some(snapshot)),
-              Some(snapshot),
-              None,
-              attempt,
-              height
-            )
+        stateMap.get(tx.id()).foreach { case TxState(received, lastReceived, firstMined, _, _, attempt, _) =>
+          val delta = toMillis(nowNanos - received)
+          val lastDelta = toMillis(nowNanos - lastReceived)
+          log.trace(s"Neutrino mining time for ${tx.id()} (attempt #$attempt): $delta ms ($lastDelta from last recv)")
+
+          val snapshot = MetricSnapshot(basePoint.addField("time-to-mine", delta).addField("time-to-last-mine", lastDelta), nowNanos)
+          stateMap(tx.id()) = TxState(
+            received,
+            lastReceived,
+            firstMined.orElse(Some(snapshot)),
+            Some(snapshot),
+            None,
+            attempt,
+            height
+          )
         }
       } else if (eventType == TxEvent.Expired || (eventType == TxEvent.Invalidated && !isAlreadyInTheState)) {
-        stateMap.get(tx.id()).foreach {
-          case st @ TxState(received, lastReceived, firstMined, _, _, _, _) =>
-            val delta = toMillis(nowNanos - received)
-            val lastDelta = toMillis(nowNanos - lastReceived)
-            log.trace(s"Neutrino fail time for ${tx.id()}: $delta ms")
-
-            val baseFailedPoint = basePoint
-              .tag("reason", reasonClass)
-              .addField("time-to-fail", delta)
-              .addField("time-to-last-fail", lastDelta)
-
-            val failedPoint = firstMined match {
-              case Some(ms) =>
-                val ffDelta = toMillis(nowNanos - ms.nano)
-                val firstDelta = toMillis(ms.nano - received)
-                baseFailedPoint
-                  .addField("time-to-first-mine", firstDelta)
-                  .addField("time-to-finish-after-first-mining", ffDelta)
-
-              case None =>
-                baseFailedPoint
-            }
-
-            stateMap(tx.id()) = st.copy(failed = Some(MetricSnapshot(failedPoint)))
+        stateMap.get(tx.id()).foreach { case st @ TxState(received, lastReceived, firstMined, _, _, _, _) =>
+          val delta = toMillis(nowNanos - received)
+          val lastDelta = toMillis(nowNanos - lastReceived)
+          log.trace(s"Neutrino fail time for ${tx.id()}: $delta ms")
+
+          val baseFailedPoint = basePoint
+            .tag("reason", reasonClass)
+            .addField("time-to-fail", delta)
+            .addField("time-to-last-fail", lastDelta)
+
+          val failedPoint = firstMined match {
+            case Some(ms) =>
+              val ffDelta = toMillis(nowNanos - ms.nano)
+              val firstDelta = toMillis(ms.nano - received)
+              baseFailedPoint
+                .addField("time-to-first-mine", firstDelta)
+                .addField("time-to-finish-after-first-mining", ffDelta)
+
+            case None =>
+              baseFailedPoint
+          }
+
+          stateMap(tx.id()) = st.copy(failed = Some(MetricSnapshot(failedPoint)))
         }
       }
 
diff --git a/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala b/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala
index 299d738c27b..19552e5cfbe 100644
--- a/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala
+++ b/node/src/main/scala/com/wavesplatform/api/common/CommonAccountsApi.scala
@@ -1,6 +1,5 @@
 package com.wavesplatform.api.common
 
-import com.google.common.base.Charsets
 import com.google.common.collect.AbstractIterator
 import com.wavesplatform.account.{Address, Alias}
 import com.wavesplatform.api.common.AddressPortfolio.{assetBalanceIterator, nftIterator}
@@ -15,6 +14,7 @@ import monix.eval.Task
 import monix.reactive.Observable
 import org.rocksdb.RocksIterator
 
+import java.nio.charset.StandardCharsets
 import java.util.regex.Pattern
 import scala.annotation.tailrec
 import scala.jdk.CollectionConverters.*
@@ -178,7 +178,7 @@ object CommonAccountsApi {
             endOfData()
           }
         } else {
-          val dataKey = new String(iter.key().drop(prefix.length), Charsets.UTF_8)
+          val dataKey = new String(iter.key().drop(prefix.length), StandardCharsets.UTF_8)
           if (matches(dataKey)) {
             nextDbEntry = Option(iter.value()).map { arr =>
               Keys.data(addressId, dataKey).parse(arr).entry
diff --git a/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala b/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala
index 2d4b0f8ccb8..15385f9cdeb 100644
--- a/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala
+++ b/node/src/main/scala/com/wavesplatform/database/RocksDBWriter.scala
@@ -843,7 +843,7 @@ class RocksDBWriter(
   }
 
   private def batchCleanupAssetBalances(fromInclusive: Height, toExclusive: Height, rw: RW): Unit = {
-    val lastUpdateAt = mutable.AnyRefMap.empty[(AddressId, IssuedAsset), Height]
+    val lastUpdateAt = mutable.HashMap.empty[(AddressId, IssuedAsset), Height]
     val updateAt = new ArrayBuffer[(AddressId, IssuedAsset, Height)]() // First height of update in this range
     val updateAtKeys = new ArrayBuffer[Key[BalanceNode]]()
 
@@ -890,7 +890,7 @@ class RocksDBWriter(
 
   private def batchCleanupAccountData(fromInclusive: Height, toExclusive: Height, rw: RW): Unit = {
     val changedDataAddresses = mutable.Set.empty[AddressId]
-    val lastUpdateAt = mutable.AnyRefMap.empty[(AddressId, String), Height]
+    val lastUpdateAt = mutable.HashMap.empty[(AddressId, String), Height]
     val updateAt = new ArrayBuffer[(AddressId, String, Height)]() // First height of update in this range
     val updateAtKeys = new ArrayBuffer[Key[DataNode]]()
 
diff --git a/node/src/main/scala/com/wavesplatform/database/package.scala b/node/src/main/scala/com/wavesplatform/database/package.scala
index 59a768b73fa..978ae09b731 100644
--- a/node/src/main/scala/com/wavesplatform/database/package.scala
+++ b/node/src/main/scala/com/wavesplatform/database/package.scala
@@ -1,6 +1,5 @@
 package com.wavesplatform
 
-import com.google.common.base.Charsets.UTF_8
 import com.google.common.collect.{Interners, Maps}
 import com.google.common.io.ByteStreams.{newDataInput, newDataOutput}
 import com.google.common.io.{ByteArrayDataInput, ByteArrayDataOutput}
@@ -32,6 +31,7 @@ import sun.nio.ch.Util
 import supertagged.TaggedType
 
 import java.nio.ByteBuffer
+import java.nio.charset.StandardCharsets
 import java.util.Map as JMap
 import scala.annotation.tailrec
 import scala.collection.mutable.ArrayBuffer
@@ -97,7 +97,7 @@ package object database {
 
     while (i < data.length) {
       val len = ((data(i) << 8) | (data(i + 1) & 0xff)).toShort // Optimization
-      s += new String(data, i + 2, len, UTF_8)
+      s += new String(data, i + 2, len, StandardCharsets.UTF_8)
       i += (2 + len)
     }
     s.result()
diff --git a/node/src/main/scala/com/wavesplatform/network/Handshake.scala b/node/src/main/scala/com/wavesplatform/network/Handshake.scala
index 48677c508aa..3d2786f02a6 100644
--- a/node/src/main/scala/com/wavesplatform/network/Handshake.scala
+++ b/node/src/main/scala/com/wavesplatform/network/Handshake.scala
@@ -1,10 +1,10 @@
 package com.wavesplatform.network
 
-import java.net.{InetAddress, InetSocketAddress}
-
-import com.google.common.base.Charsets
+import com.wavesplatform.utils.*
 import io.netty.buffer.ByteBuf
-import com.wavesplatform.utils._
+
+import java.net.{InetAddress, InetSocketAddress}
+import java.nio.charset.StandardCharsets
 
 case class Handshake(
     applicationName: String,
@@ -57,14 +57,14 @@ object Handshake {
     if (appNameSize < 0 || appNameSize > Byte.MaxValue) {
       throw new InvalidHandshakeException(s"An invalid application name's size: $appNameSize")
     }
-    val appName = in.readSlice(appNameSize).toString(Charsets.UTF_8)
+    val appName = in.readSlice(appNameSize).toString(StandardCharsets.UTF_8)
     val appVersion = (in.readInt(), in.readInt(), in.readInt())
 
     val nodeNameSize = in.readByte()
     if (nodeNameSize < 0 || nodeNameSize > Byte.MaxValue) {
       throw new InvalidHandshakeException(s"An invalid node name's size: $nodeNameSize")
     }
-    val nodeName = in.readSlice(nodeNameSize).toString(Charsets.UTF_8)
+    val nodeName = in.readSlice(nodeNameSize).toString(StandardCharsets.UTF_8)
 
     val nonce = in.readLong()
 
diff --git a/node/src/main/scala/com/wavesplatform/utils/package.scala b/node/src/main/scala/com/wavesplatform/utils/package.scala
index b1b781adde5..079b0d9026f 100644
--- a/node/src/main/scala/com/wavesplatform/utils/package.scala
+++ b/node/src/main/scala/com/wavesplatform/utils/package.scala
@@ -1,6 +1,5 @@
 package com.wavesplatform
 
-import com.google.common.base.Charsets
 import com.google.common.primitives.UnsignedBytes
 import com.google.protobuf.ByteString
 import com.wavesplatform.common.state.ByteStr
@@ -8,6 +7,7 @@ import com.wavesplatform.common.utils.{Base58, Base64}
 import com.wavesplatform.lang.v1.compiler.Terms.*
 import play.api.libs.json.*
 
+import java.nio.charset.StandardCharsets
 import java.security.SecureRandom
 import scala.annotation.tailrec
 
@@ -80,7 +80,7 @@ package object utils {
   }
 
   implicit class StringBytes(val s: String) extends AnyVal {
-    def utf8Bytes: Array[Byte] = s.getBytes(Charsets.UTF_8)
+    def utf8Bytes: Array[Byte] = s.getBytes(StandardCharsets.UTF_8)
     def toByteString: ByteString = ByteString.copyFromUtf8(s)
   }
 
@@ -98,5 +98,5 @@ package object utils {
     case FAIL(reason) => Json.obj("error" -> ApiError.ScriptExecutionError.Id, "error" -> reason)
   }
 
-  implicit val byteStrOrdering: Ordering[ByteStr] = (x, y) => UnsignedBytes.lexicographicalComparator().compare(x.arr, y.arr)
+  implicit val byteStrOrdering: Ordering[ByteStr] = (x, y) => UnsignedBytes.lexicographicalComparator().compare(x.arr, y.arr)
 }
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 96750a0fd8d..a2549afc9b8 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -25,26 +25,26 @@ object Dependencies {
   val kindProjector = compilerPlugin("org.typelevel" % "kind-projector" % "0.13.3" cross CrossVersion.full)
 
   val akkaHttp = akkaHttpModule("akka-http")
-  val googleGuava = "com.google.guava" % "guava" % "33.3.1-jre"
+  val googleGuava = "com.google.guava" % "guava" % "33.4.0-jre"
   val kamonCore = kamonModule("core")
   val machinist = "org.typelevel" %% "machinist" % "0.6.8"
-  val logback = "ch.qos.logback" % "logback-classic" % "1.5.11"
+  val logback = "ch.qos.logback" % "logback-classic" % "1.5.16"
   val janino = "org.codehaus.janino" % "janino" % "3.1.12"
-  val asyncHttpClient = "org.asynchttpclient" % "async-http-client" % "3.0.0"
+  val asyncHttpClient = "org.asynchttpclient" % "async-http-client" % "3.0.1"
   val curve25519 = "com.wavesplatform" % "curve25519-java" % "0.6.6"
-  val nettyHandler = "io.netty" % "netty-handler" % "4.1.110.Final"
+  val nettyHandler = "io.netty" % "netty-handler" % "4.1.116.Final"
 
   val shapeless = Def.setting("com.chuusai" %%% "shapeless" % "2.3.12")
 
   val playJson = "com.typesafe.play" %% "play-json" % "2.10.6"
 
   val scalaTest = "org.scalatest" %% "scalatest" % "3.2.19" % Test
-  val scalaJsTest = Def.setting("com.lihaoyi" %%% "utest" % "0.8.4" % Test)
+  val scalaJsTest = Def.setting("com.lihaoyi" %%% "utest" % "0.8.5" % Test)
 
-  val sttp3 = "com.softwaremill.sttp.client3" % "core_2.13" % "3.10.1"
-  val sttp3Monix = "com.softwaremill.sttp.client3" %% "monix" % "3.10.1"
+  val sttp3 = "com.softwaremill.sttp.client3" % "core_2.13" % "3.10.2"
+  val sttp3Monix = "com.softwaremill.sttp.client3" %% "monix" % "3.10.2"
 
-  val bouncyCastleProvider = "org.bouncycastle" % s"bcprov-jdk18on" % "1.78.1"
+  val bouncyCastleProvider = "org.bouncycastle" % s"bcprov-jdk18on" % "1.79"
 
   val console = Seq("com.github.scopt" %% "scopt" % "4.1.0")
 
@@ -77,7 +77,7 @@ object Dependencies {
     logback,
    "com.github.jnr" % "jnr-unixsocket" % "0.38.23", // To support Apple ARM
    "com.spotify" % "docker-client" % "8.16.0",
-    "com.fasterxml.jackson.dataformat" % "jackson-dataformat-properties" % "2.18.0",
+    "com.fasterxml.jackson.dataformat" % "jackson-dataformat-properties" % "2.18.2",
    asyncHttpClient
  ).map(_ % Test)
 
@@ -86,7 +86,7 @@ object Dependencies {
    "org.scalatestplus" %% "scalacheck-1-16" % "3.2.14.0",
    "org.scalacheck" %% "scalacheck" % "1.18.1",
    "org.mockito" % "mockito-all" % "1.10.19",
-    "org.scalamock" %% "scalamock" % "6.0.0"
+    "org.scalamock" %% "scalamock" % "6.1.1"
  ).map(_ % Test)
 
  lazy val qaseReportDeps = Seq(
@@ -100,15 +100,15 @@ object Dependencies {
    akkaModule("slf4j") % Runtime
  )
 
-  private val rocksdb = "org.rocksdb" % "rocksdbjni" % "9.6.1"
+  private val rocksdb = "org.rocksdb" % "rocksdbjni" % "9.8.4"
 
  lazy val node = Def.setting(
    Seq(
      rocksdb,
      ("org.rudogma" %%% "supertagged" % "2.0-RC2").exclude("org.scala-js", "scalajs-library_2.13"),
      "commons-net" % "commons-net" % "3.11.1",
-      "commons-io" % "commons-io" % "2.17.0",
-      "com.github.pureconfig" %% "pureconfig" % "0.17.7",
+      "commons-io" % "commons-io" % "2.18.0",
+      "com.github.pureconfig" %% "pureconfig" % "0.17.8",
      "net.logstash.logback" % "logstash-logback-encoder" % "8.0" % Runtime,
      kamonCore,
      kamonModule("system-metrics"),
@@ -127,8 +127,8 @@ object Dependencies {
      monixModule("reactive").value,
      nettyHandler,
      "com.typesafe.scala-logging" %% "scala-logging" % "3.9.5",
-      "eu.timepit" %% "refined" % "0.11.2" exclude ("org.scala-lang.modules", "scala-xml_2.13"),
-      "com.esaulpaugh" % "headlong" % "12.3.1",
+      "eu.timepit" %% "refined" % "0.11.3" exclude ("org.scala-lang.modules", "scala-xml_2.13"),
+      "com.esaulpaugh" % "headlong" % "12.3.3",
      "com.github.jbellis" % "jamm" % "0.4.0", // Weighing caches
      web3jModule("abi").excludeAll(ExclusionRule("org.bouncycastle", "bcprov-jdk15on"))
    ) ++ console ++ logDeps ++ protobuf.value ++ langCompilerPlugins.value
diff --git a/project/build.properties b/project/build.properties
index ee4c672cd0d..73df629ac1a 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.10.1
+sbt.version=1.10.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 3ab212d5895..c79d476797a 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -6,7 +6,7 @@ resolvers ++= Seq(
 
 // Should go before Scala.js
 addSbtPlugin("com.thesamet" % "sbt-protoc" % "1.0.7")
-libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.17"
+libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "1.0.0-alpha.1"
 
 Seq(
   "com.eed3si9n" % "sbt-assembly" % "2.3.0",