
Commit

feat: Bump model versions (#299)
ishaansehgal99 authored and helayoty committed Mar 29, 2024
1 parent a29d3bb commit 4a7a379
Showing 3 changed files with 13 additions and 13 deletions.
8 changes: 4 additions & 4 deletions presets/models/falcon/model.go
@@ -37,10 +37,10 @@ var (
 	PresetFalcon40BInstructModel = PresetFalcon40BModel + "-instruct"
 
 	PresetFalconTagMap = map[string]string{
-		"Falcon7B":          "0.0.2",
-		"Falcon7BInstruct":  "0.0.2",
-		"Falcon40B":         "0.0.2",
-		"Falcon40BInstruct": "0.0.2",
+		"Falcon7B":          "0.0.3",
+		"Falcon7BInstruct":  "0.0.3",
+		"Falcon40B":         "0.0.3",
+		"Falcon40BInstruct": "0.0.3",
 	}
 
 	baseCommandPresetFalcon = "accelerate launch"

12 changes: 6 additions & 6 deletions presets/models/mistral/model.go
@@ -23,12 +23,12 @@ func init() {
 }
 
 var (
-	PresetMistral7BModel         = "mistral-7b"
-	PresetMistral7BInstructModel = PresetMistral7BModel + "-instruct"
+	PresetMistral7BModel         = "mistral-7b"
+	PresetMistral7BInstructModel = PresetMistral7BModel + "-instruct"
 
 	PresetMistralTagMap = map[string]string{
-		"Mistral7B":         "0.0.2",
-		"Mistral7BInstruct": "0.0.2",
+		"Mistral7B":         "0.0.3",
+		"Mistral7BInstruct": "0.0.3",
 	}
 
 	baseCommandPresetMistral = "accelerate launch"
@@ -46,7 +46,7 @@ func (*mistral7b) GetInferenceParameters() *model.PresetInferenceParam {
 	return &model.PresetInferenceParam{
 		ModelFamilyName:           "Mistral",
 		ImageAccessMode:           string(kaitov1alpha1.ModelImageAccessModePublic),
-		DiskStorageRequirement:    "50Gi",
+		DiskStorageRequirement:    "100Gi",
 		GPUCountRequirement:       "1",
 		TotalGPUMemoryRequirement: "14Gi",
 		PerGPUMemoryRequirement:   "0Gi", // We run Mistral using native vertical model parallel, no per GPU memory requirement.
@@ -70,7 +70,7 @@ func (*mistral7bInst) GetInferenceParameters() *model.PresetInferenceParam {
 	return &model.PresetInferenceParam{
 		ModelFamilyName:           "Mistral",
 		ImageAccessMode:           string(kaitov1alpha1.ModelImageAccessModePublic),
-		DiskStorageRequirement:    "50Gi",
+		DiskStorageRequirement:    "100Gi",
 		GPUCountRequirement:       "1",
 		TotalGPUMemoryRequirement: "16Gi",
 		PerGPUMemoryRequirement:   "0Gi", // We run mistral using native vertical model parallel, no per GPU memory requirement.

6 changes: 3 additions & 3 deletions presets/models/phi/model.go
@@ -19,10 +19,10 @@ func init() {
 }
 
 var (
-	PresetPhi2Model = "phi-2"
+	PresetPhi2Model = "phi-2"
 
 	PresetPhiTagMap = map[string]string{
-		"Phi2": "0.0.1",
+		"Phi2": "0.0.2",
 	}
 
 	baseCommandPresetPhi = "accelerate launch"
@@ -40,7 +40,7 @@ func (*phi2) GetInferenceParameters() *model.PresetInferenceParam {
 	return &model.PresetInferenceParam{
 		ModelFamilyName:           "Phi",
 		ImageAccessMode:           string(kaitov1alpha1.ModelImageAccessModePublic),
-		DiskStorageRequirement:    "30Gi",
+		DiskStorageRequirement:    "50Gi",
 		GPUCountRequirement:       "1",
 		TotalGPUMemoryRequirement: "12Gi",
 		PerGPUMemoryRequirement:   "0Gi", // We run Phi using native vertical model parallel, no per GPU memory requirement.

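Note: the maps touched in this commit (PresetFalconTagMap, PresetMistralTagMap, PresetPhiTagMap) pair each preset name with the container image tag to pull. The following is a minimal, hypothetical sketch of how such a tag map could be combined with a registry and image name to build a full image reference; the registry URL, repository layout, and imageRef helper are assumptions for illustration only and are not code from this repository.

package main

import "fmt"

// NOTE: registry and image names below are assumptions for illustration only;
// they are not taken from this commit.
const exampleRegistry = "registry.example.com/kaito"

// presetTagMap mirrors the shape of the PresetFalconTagMap / PresetMistralTagMap /
// PresetPhiTagMap variables changed in this commit: preset name -> image tag.
var presetTagMap = map[string]string{
	"Falcon7B":          "0.0.3",
	"Mistral7BInstruct": "0.0.3",
	"Phi2":              "0.0.2",
}

// imageRef builds a full container image reference for a preset. The
// repository naming scheme (hypothetical) is <registry>/<image-name>:<tag>.
func imageRef(preset, imageName string) (string, error) {
	tag, ok := presetTagMap[preset]
	if !ok {
		return "", fmt.Errorf("no image tag registered for preset %q", preset)
	}
	return fmt.Sprintf("%s/%s:%s", exampleRegistry, imageName, tag), nil
}

func main() {
	ref, err := imageRef("Mistral7BInstruct", "mistral-7b-instruct")
	if err != nil {
		panic(err)
	}
	fmt.Println(ref) // registry.example.com/kaito/mistral-7b-instruct:0.0.3
}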
