Diagnose #454 #455

Closed
wants to merge 17 commits
101 changes: 0 additions & 101 deletions .github/workflows/check-build-test.yml
@@ -15,115 +15,14 @@ concurrency:
  cancel-in-progress: true

jobs:
  style-compile-mima:
    name: Compile, Code Style, Binary Compatibility
    runs-on: ubuntu-20.04
    env:
      JAVA_OPTS: -Xms2G -Xmx3G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-tags: true
          fetch-depth: 0

      - name: Setup Java 8
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: 8

      - name: Cache Coursier cache
        uses: coursier/cache-action@v6

      - name: "Code style, compile tests, MiMa. Run locally with: sbt \"javafmtCheckAll; +Test/compile; +mimaReportBinaryIssues\""
        run: sbt "javafmtCheckAll; +Test/compile; +mimaReportBinaryIssues"

  documentation:
    name: ScalaDoc, Documentation with Paradox
    runs-on: ubuntu-20.04
    env:
      JAVA_OPTS: -Xms2G -Xmx3G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-tags: true
          fetch-depth: 0

      - name: Setup Java 11
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: 11

      - name: Cache Coursier cache
        uses: coursier/cache-action@v6

      - name: "Create all API docs and create site with Paradox"
        run: sbt docs/makeSite

      # TODO: Fix after documentation updates
      # - name: Run Link Validator
      #   run: cs launch net.runne::site-link-validator:0.2.2 -- scripts/link-validator.conf

  connectors:
    runs-on: ubuntu-20.04

    strategy:
      fail-fast: false
      matrix:
        include:
          - { connector: amqp, pre_cmd: 'docker-compose up -d amqp' }
          - { connector: avroparquet }
          - { connector: awslambda }
          - { connector: aws-event-bridge, pre_cmd: 'docker-compose up -d amazoneventbridge' }
          - { connector: azure-storage-queue }
          - { connector: cassandra, pre_cmd: 'docker-compose up -d cassandra' }
          - { connector: couchbase, pre_cmd: 'docker-compose up -d couchbase_prep' }
          - { connector: csv }
          - { connector: dynamodb, pre_cmd: 'docker-compose up -d dynamodb' }
          - { connector: elasticsearch, pre_cmd: 'docker-compose up -d elasticsearch6 elasticsearch7 opensearch1' }
          - { connector: file }
          - { connector: ftp, pre_cmd: './scripts/ftp-servers.sh' }
          - { connector: geode, pre_cmd: 'docker-compose up -d geode' }
          - { connector: google-cloud-bigquery }
          - { connector: google-cloud-bigquery-storage }
          - { connector: google-cloud-pub-sub, pre_cmd: 'docker-compose up -d gcloud-pubsub-emulator_prep' }
          - { connector: google-cloud-pub-sub-grpc, pre_cmd: 'docker-compose up -d gcloud-pubsub-emulator_prep' }
          - { connector: google-cloud-storage }
          - { connector: google-common }
          - { connector: google-fcm }
          # hbase disabled until we resolve why new docker image fails our build: https://github.com/akka/alpakka/issues/2185
          # - { connector: hbase, pre_cmd: 'docker-compose up -d hbase' }
          - { connector: hdfs, pre_cmd: 'file ${HOME}/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_2.13/2.0.0/cats-kernel_2.13-2.0.0.jar' }
          - { connector: huawei-push-kit }
          - { connector: influxdb, pre_cmd: 'docker-compose up -d influxdb' }
          - { connector: ironmq, pre_cmd: 'docker-compose up -d ironauth ironmq' }
          - { connector: jms, pre_cmd: 'docker-compose up -d ibmmq' }
          - { connector: json-streaming }
          - { connector: kinesis }
          - { connector: kudu, pre_cmd: 'docker-compose up -d kudu-master-data kudu-tserver-data kudu-master kudu-tserver' }
          - { connector: mongodb, pre_cmd: 'docker-compose up -d mongo' }
          - { connector: mqtt, pre_cmd: 'docker-compose up -d mqtt' }
          - { connector: mqtt-streaming, pre_cmd: 'docker-compose up -d mqtt' }
          - { connector: orientdb, pre_cmd: 'docker-compose up -d orientdb' }
          - { connector: pravega, pre_cmd: 'docker-compose up -d pravega'}
          - { connector: reference }
          - { connector: s3 }
          - { connector: spring-web }
          - { connector: simple-codecs }
          - { connector: slick }
          - { connector: sns, pre_cmd: 'docker-compose up -d amazonsns' }
          - { connector: solr }
          - { connector: sqs, pre_cmd: 'docker-compose up -d elasticmq' }
          - { connector: sse }
          - { connector: text }
          - { connector: udp }
          - { connector: unix-domain-socket }
          - { connector: xml }

    env:
      JAVA_OPTS: -Xms2G -Xmx3G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
23 changes: 0 additions & 23 deletions .github/workflows/format.yml

This file was deleted.

33 changes: 0 additions & 33 deletions .github/workflows/headers.yml

This file was deleted.

@@ -648,7 +648,7 @@ final class ActorMqttServerSession(settings: MqttSessionSettings)(implicit syste
case _: WatchedActorTerminatedException => ByteString.empty
}
.filter(_.nonEmpty)
.log("server-commandFlow", _.iterator.decodeControlPacket(settings.maxPacketSize)) // we decode here so we can see the generated packet id
.log(s"server-commandFlow-${connectionId}", _.iterator.decodeControlPacket(settings.maxPacketSize)) // we decode here so we can see the generated packet id
.withAttributes(ActorAttributes.logLevels(onFailure = Logging.DebugLevel)))
}
.mapMaterializedValue(_ => NotUsed)
@@ -667,7 +667,7 @@
}
.via(new MqttFrameStage(settings.maxPacketSize))
.map(_.iterator.decodeControlPacket(settings.maxPacketSize))
.log("server-events")
.log(s"server-events-${connectionId}")
.mapAsync[Either[MqttCodec.DecodeError, Event[A]]](settings.eventParallelism) {
case Right(cp: Connect) =>
val reply = Promise[ClientConnection.ForwardConnect.type]()