Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Draft: Add arm arch support (Apple Silicon / M1) #3691

Draft
wants to merge 3 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 2 additions & 4 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,7 @@ dependencies {

implementation "org.jgrapht:jgrapht-core:1.3.0"

implementation('com.github.LBNL-UCB-STI:or-tools-wrapper:7.5-0') {
exclude group: 'com.google.protobuf', module: 'protobuf-java'
}
implementation group: 'com.google.ortools', name: 'ortools-java', version: '9.4.1874'

implementation 'com.github.LBNL-UCB-STI:helics-wrapper:v3.3.0'

Expand Down Expand Up @@ -311,7 +309,7 @@ dependencies {

implementation group: 'com.zaxxer', name: 'nuprocess', version: '1.2.4'

def parquet = "1.10.0"
def parquet = "1.12.3"
implementation group: 'org.apache.parquet', name: 'parquet-hadoop', version: parquet
implementation group: 'org.apache.parquet', name: 'parquet-avro', version: parquet
implementation(group: 'org.apache.hadoop', name: 'hadoop-client', version: '2.7.3') {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import beam.agentsim.agents.MobilityRequest
import beam.agentsim.agents.ridehail.RideHailMatching._
import beam.sim.BeamServices
import beam.sim.config.BeamConfig.Beam.Agentsim.Agents.RideHail.Managers$Elm
import com.github.beam.OrToolsLoader
import com.google.ortools.Loader
import com.google.ortools.linearsolver.{MPSolver, MPVariable}
import org.jgrapht.graph.DefaultEdge
import org.matsim.core.utils.collections.QuadTree
Expand All @@ -17,7 +17,7 @@ import scala.concurrent.Future
object AlonsoMoraMatchingWithMIPAssignment {

private lazy val initialize: Unit = {
OrToolsLoader.load()
Loader.loadNativeLibraries()
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import org.apache.hadoop.fs.Path
import org.apache.parquet.avro.AvroParquetWriter
import org.apache.parquet.hadoop.ParquetWriter
import org.apache.parquet.hadoop.metadata.CompressionCodecName
import org.apache.parquet.hadoop.util.HadoopOutputFile
import org.matsim.api.core.v01.events.Event

import scala.collection.JavaConverters._
Expand Down Expand Up @@ -71,10 +72,11 @@ class BeamEventsWriterParquet(

def getWriter(schema: Schema, filePath: String): ParquetWriter[GenericData.Record] = {
val path = new Path(filePath)
val builder = AvroParquetWriter.builder[GenericData.Record](path)
val outputFile = HadoopOutputFile.fromPath(path, new Configuration())
val builder = AvroParquetWriter.builder[GenericData.Record](outputFile)

builder
.withRowGroupSize(ParquetWriter.DEFAULT_BLOCK_SIZE)
.withRowGroupSize(ParquetWriter.DEFAULT_BLOCK_SIZE.toLong)
.withPageSize(ParquetWriter.DEFAULT_PAGE_SIZE)
.withSchema(schema)
.withConf(new Configuration())
Expand Down
8 changes: 4 additions & 4 deletions src/main/scala/beam/router/r5/RouteDumper.scala
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package beam.router.r5

import java.util

import beam.agentsim.agents.vehicles.VehicleProtocol.StreetVehicle
import beam.agentsim.events.SpaceTime
import beam.router.BeamRouter.{EmbodyWithCurrentTravelTime, RoutingRequest, RoutingResponse}
Expand All @@ -11,10 +10,12 @@ import beam.sim.population.{AttributesOfIndividual, HouseholdAttributes}
import org.apache.avro.Schema.Type
import org.apache.avro.generic.GenericData
import org.apache.avro.{Schema, SchemaBuilder}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.parquet.avro.AvroParquetWriter
import org.apache.parquet.hadoop.ParquetWriter
import org.apache.parquet.hadoop.metadata.CompressionCodecName
import org.apache.parquet.hadoop.util.HadoopOutputFile
import org.matsim.api.core.v01.events.Event
import org.matsim.core.controler.OutputDirectoryHierarchy
import org.matsim.core.controler.events.{IterationEndsEvent, IterationStartsEvent}
Expand Down Expand Up @@ -555,10 +556,9 @@ object RouteDumper {
}

def createWriter(path: String, schema: Schema): ParquetWriter[GenericData.Record] = {
val outputFile = HadoopOutputFile.fromPath(new Path(path), new Configuration())
AvroParquetWriter
.builder[GenericData.Record](
new Path(path)
)
.builder[GenericData.Record](outputFile)
.withSchema(schema)
.withCompressionCodec(CompressionCodecName.SNAPPY)
.build()
Expand Down