update torchsharp and helix image (#7188)
* update torchsharp and helix image

* fix build error

* Update .vsts-dotnet-ci.yml

* use centos

* Update Versions.props
LittleLittleCloud authored Jul 17, 2024
1 parent 579fe03 commit 05bb0e3
Showing 6 changed files with 10 additions and 13 deletions.
6 changes: 3 additions & 3 deletions .vsts-dotnet-ci.yml
@@ -93,7 +93,7 @@ jobs:
pool:
name: NetCore-Public
demands: ImageOverride -equals build.Ubuntu.2204.amd64.open
-helixQueue: [email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
+helixQueue: [email protected]/dotnet-buildtools/prereqs:ubuntu-22.04-helix-arm64v8

- template: /build/ci/job-template.yml
parameters:
@@ -104,7 +104,7 @@ jobs:
pool:
name: NetCore-Public
demands: ImageOverride -equals build.Ubuntu.2204.amd64.open
-helixQueue: [email protected]/dotnet-buildtools/prereqs:centos-stream8-mlnet-helix
+helixQueue: [email protected]/dotnet-buildtools/prereqs:centos-stream9-mlnet-helix

- template: /build/ci/job-template.yml
parameters:
@@ -115,7 +115,7 @@ jobs:
pool:
name: NetCore-Public
demands: ImageOverride -equals build.Ubuntu.2204.amd64.open
-helixQueue: [email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-mlnet-helix
+helixQueue: [email protected]/dotnet-buildtools/prereqs:ubuntu-22.04-mlnet-helix

- template: /build/ci/job-template.yml
parameters:
2 changes: 1 addition & 1 deletion build/ci/job-template.yml
@@ -121,7 +121,7 @@ jobs:
- ${{ if eq(parameters.nightlyBuild, 'false') }}:
- ${{ if eq(parameters.innerLoop, 'false') }}:
- ${{ if and(eq(parameters.runSpecific, 'false'), eq(parameters.useVSTestTask, 'false')) }}:
-- script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\2.1.0.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.101.5\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
+- script: set PATH=%PATH%;%USERPROFILE%\.nuget\packages\libtorch-cpu-win-x64\2.2.0.1\runtimes\win-x64\native;%USERPROFILE%\.nuget\packages\torchsharp\0.102.5\runtimes\win-x64\native & ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
displayName: Run All Tests.
- ${{ if and(eq(parameters.runSpecific, 'true'), eq(parameters.useVSTestTask, 'false')) }}:
- script: ${{ parameters.buildScript }} /p:Build=false -configuration $(_configuration) /p:TargetArchitecture=${{ parameters.architecture }} /p:TestArchitectures=${{ parameters.architecture }} -test -integrationTest /p:TestRunnerAdditionalArguments='-trait$(spaceValue)Category=RunSpecificTest' /p:Coverage=${{ parameters.codeCoverage }} $(testTargetFramework)
4 changes: 2 additions & 2 deletions eng/Versions.props
@@ -63,8 +63,8 @@
<TensorflowDotNETVersion>0.20.1</TensorflowDotNETVersion>
<TensorFlowMajorVersion>2</TensorFlowMajorVersion>
<TensorFlowVersion>2.3.1</TensorFlowVersion>
-<TorchSharpVersion>0.101.5</TorchSharpVersion>
-<LibTorchVersion>2.1.0.1</LibTorchVersion>
+<TorchSharpVersion>0.102.7</TorchSharpVersion>
+<LibTorchVersion>2.2.1.1</LibTorchVersion>
<!-- Build/infrastructure Dependencies -->
<CodecovVersion>1.12.4</CodecovVersion>
<CoverletCollectorVersion>3.1.2</CoverletCollectorVersion>
5 changes: 1 addition & 4 deletions src/Microsoft.ML.GenAI.Core/Microsoft.ML.GenAI.Core.csproj
@@ -9,10 +9,7 @@

<ItemGroup>
<PackageReference Include="System.Memory" Version="$(SystemMemoryVersion)" />
-<PackageReference Include="TorchSharp" Version="0.102.5" />
-<PackageReference Include="libtorch-cpu-win-x64" Version="2.2.1.1" Condition="$([MSBuild]::IsOSPlatform('Windows'))" PrivateAssets="all" />
-<PackageReference Include="libtorch-cpu-linux-x64" Version="2.2.1.1" Condition="$([MSBuild]::IsOSPlatform('Linux'))" PrivateAssets="all" />
-<PackageReference Include="libtorch-cpu-osx-x64" Version="2.2.1.1" Condition="$([MSBuild]::IsOSPlatform('OSX'))" PrivateAssets="all" />
+<PackageReference Include="TorchSharp" Version="$(TorchSharpVersion)" />
</ItemGroup>

<ItemGroup>
2 changes: 1 addition & 1 deletion src/Microsoft.ML.TorchSharp/AutoFormerV2/ConvModule.cs
@@ -35,7 +35,7 @@ public class ConvModule : Module<Tensor, Tensor>
public ConvModule(int inChannel, int outChannel, int kernelSize, int stride = 1, int padding = 0, int dilation = 1, bool bias = true, bool useRelu = true)
: base(nameof(ConvModule))
{
-this.conv = nn.Conv2d(inputChannel: inChannel, outputChannel: outChannel, kernelSize: kernelSize, stride: stride, padding: padding, dilation: dilation, bias: bias);
+this.conv = nn.Conv2d(in_channels: inChannel, out_channels: outChannel, kernelSize: kernelSize, stride: stride, padding: padding, dilation: dilation, bias: bias);
this.useRelu = useRelu;
if (this.useRelu)
{
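The ConvModule change above tracks the renamed nn.Conv2d parameters in TorchSharp 0.102.x: in_channels and out_channels replace inputChannel and outputChannel, while kernelSize, stride, padding, dilation, and bias keep their names. Below is a minimal standalone sketch of the updated call, assuming the TorchSharp 0.102.5 package used by this commit; the channel counts, kernel size, and input shape are illustrative values, not taken from ConvModule.

```csharp
using System;
using TorchSharp;
using static TorchSharp.torch;

class Conv2dRenameSketch
{
    static void Main()
    {
        // TorchSharp 0.102.x naming: in_channels/out_channels;
        // kernelSize, stride, padding, dilation, and bias are unchanged.
        var conv = nn.Conv2d(in_channels: 3, out_channels: 16, kernelSize: 3,
                             stride: 1, padding: 1, dilation: 1, bias: true);

        // Dummy forward pass: batch of 1, 3 channels, 32x32 spatial size.
        using var input = randn(1, 3, 32, 32);
        using var output = conv.forward(input);
        Console.WriteLine(string.Join("x", output.shape)); // 1x16x32x32
    }
}
```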
@@ -63,7 +63,7 @@ public void Step()
public double GetGradNorm()
{
return Math.Sqrt(Parameters
-.Select(p => p.grad())
+.Select(p => p.grad)
.Where(grad => grad.IsNotNull()) // parameters unused have no gradient
.Select(grad => grad.square().sum().ToDouble())
.Sum());
@@ -82,7 +82,7 @@ public void MultiplyGrads(double c)
{
foreach (var p in Parameters)
{
-using var grad = p.grad();
+using var grad = p.grad;
if (grad.IsNotNull())
{
grad.mul_(c);
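The two hunks above follow TorchSharp's change of the gradient accessor from a grad() method to a grad property on tensors. Below is a minimal standalone sketch of the same gradient-norm pattern against TorchSharp 0.102.x; the toy nn.Linear model is illustrative, and a plain null check stands in for the IsNotNull() filter used in the repository code.

```csharp
using System;
using System.Linq;
using TorchSharp;
using static TorchSharp.torch;

class GradNormSketch
{
    static void Main()
    {
        var model = nn.Linear(4, 2);

        // Build a scalar loss and backpropagate so the parameters get gradients.
        using var loss = model.forward(randn(8, 4)).sum();
        loss.backward();

        // Same shape as GetGradNorm above, but p.grad is now a property,
        // not a p.grad() call. Parameters that never received a gradient
        // are skipped (here via a plain null check).
        var gradNorm = Math.Sqrt(model.parameters()
            .Select(p => p.grad)
            .Where(g => g is not null)
            .Select(g => g.square().sum().ToDouble())
            .Sum());

        Console.WriteLine($"grad norm: {gradNorm:F4}");
    }
}
```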
