Skip to content

Commit

Permalink
[akamai] force sync on boot, handle members with different ports
Browse files Browse the repository at this point in the history
  • Loading branch information
notandy committed Oct 11, 2024
1 parent c77f7da commit 2928f47
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 41 deletions.
4 changes: 2 additions & 2 deletions internal/driver/akamai/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ func (s *AkamaiAgent) WorkerThread() {
log.Error(err.Error())
}

if err := s.FetchAndSyncDomains(domains); err != nil {
if err := s.FetchAndSyncDomains(domains, true); err != nil {
log.Error(err.Error())
}
case <-s.workerTicker.C: // Activate periodically
Expand All @@ -170,7 +170,7 @@ func (s *AkamaiAgent) WorkerThread() {
log.Error(err.Error())
}

if err := s.FetchAndSyncDomains(nil); err != nil {
if err := s.FetchAndSyncDomains(nil, false); err != nil {
log.Error(err.Error())
}

Expand Down
6 changes: 3 additions & 3 deletions internal/driver/akamai/domain.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,21 +50,21 @@ func (s *AkamaiAgent) EnsureDomain(domainType string) error {
return nil
}

func (s *AkamaiAgent) FetchAndSyncDomains(domains []string) error {
func (s *AkamaiAgent) FetchAndSyncDomains(domains []string, force bool) error {
if s.executing {
return nil
}

s.executing = true
defer func() { s.executing = false }()

log.Debugf("Running FetchAndSyncDomains(domains=%+v)", domains)
log.Debugf("Running FetchAndSyncDomains(domains=%+v, force=%t)", domains, force)
response, err := s.rpc.GetDomains(context.Background(), &server.SearchRequest{
Provider: "akamai",
PageNumber: 0,
ResultPerPage: 1,
FullyPopulated: true,
Pending: domains == nil,
Pending: domains == nil && !force,
Ids: domains,
})
if err != nil {
Expand Down
77 changes: 41 additions & 36 deletions internal/driver/akamai/property.go
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,13 @@ MEMBERLOOP:
driver.GetProvisioningStatusRequest(member.Id, "MEMBER", "ACTIVE"))
}

// Due to a shortcoming of individual liveness tests per member, we have to replicate a monitor per unique member port
// collect unique member ports
uniquePorts := make(map[uint32]interface{})
for _, member := range members {
uniquePorts[member.GetPort()] = nil
}

// Add new Monitors
for _, monitor := range monitors {
if monitor.ProvisioningStatus == models.MonitorProvisioningStatusPENDINGDELETE {
Expand All @@ -139,45 +146,43 @@ MEMBERLOOP:
continue
}

livenessTest := gtm.LivenessTest{
Name: monitor.GetId(),
TestObjectProtocol: MONITOR_LIVENESS_TYPE_MAP[monitor.GetType()],
TestInterval: int(monitor.GetInterval()),
TestTimeout: float32(monitor.GetTimeout()),
Disabled: !monitor.GetAdminStateUp(),
}

switch monitor.GetType() {
case rpcmodels.Monitor_HTTPS:
fallthrough
case rpcmodels.Monitor_HTTP:
if monitor.GetSend() == "" {
livenessTest.TestObject = "/"
} else {
livenessTest.TestObject = monitor.GetSend()
}
var testPort uint32 = 80
for _, member := range members {
testPort = member.GetPort()
break
monitorLoop:
for testPort := range uniquePorts {
livenessTest := gtm.LivenessTest{
Name: fmt.Sprintf("%s-%d", monitor.GetId(), testPort),
TestObjectPort: int(testPort),
TestObjectProtocol: MONITOR_LIVENESS_TYPE_MAP[monitor.GetType()],
TestInterval: int(monitor.GetInterval()),
TestTimeout: float32(monitor.GetTimeout()),
Disabled: !monitor.GetAdminStateUp(),
}
livenessTest.TestObjectPort = int(testPort)
livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domain.GetFqdn()}}
if domainName := monitor.GetDomainName(); domainName != "" {
livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domainName}}

switch monitor.GetType() {
case rpcmodels.Monitor_HTTPS:
fallthrough
case rpcmodels.Monitor_HTTP:
if monitor.GetSend() == "" {
livenessTest.TestObject = "/"
} else {
livenessTest.TestObject = monitor.GetSend()
}
livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domain.GetFqdn()}}
if domainName := monitor.GetDomainName(); domainName != "" {
livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domainName}}
}
livenessTest.HTTPMethod = swag.String(monitor.GetMethod().String())
case rpcmodels.Monitor_TCP:
livenessTest.RequestString = monitor.GetSend()
livenessTest.ResponseString = monitor.GetReceive()
default:
// unsupported type
log.Warnf("Unsupported monitor type: %s", monitor.GetType())
provRequests = append(provRequests,
driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusERROR))
continue monitorLoop
}
livenessTest.HTTPMethod = swag.String(monitor.GetMethod().String())
case rpcmodels.Monitor_TCP:
livenessTest.RequestString = monitor.GetSend()
livenessTest.ResponseString = monitor.GetReceive()
default:
// unsupported type
log.Warnf("Unsupported monitor type: %s", monitor.GetType())
provRequests = append(provRequests,
driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusERROR))
continue
property.LivenessTests = append(property.LivenessTests, &livenessTest)
}
property.LivenessTests = append(property.LivenessTests, &livenessTest)
provRequests = append(provRequests,
driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusACTIVE))
}
Expand Down

0 comments on commit 2928f47

Please sign in to comment.