diff --git a/internal/driver/akamai/agent.go b/internal/driver/akamai/agent.go
index e6d730cd..1735a52f 100644
--- a/internal/driver/akamai/agent.go
+++ b/internal/driver/akamai/agent.go
@@ -156,7 +156,7 @@ func (s *AkamaiAgent) WorkerThread() {
 				log.Error(err.Error())
 			}
 
-			if err := s.FetchAndSyncDomains(domains); err != nil {
+			if err := s.FetchAndSyncDomains(domains, true); err != nil {
 				log.Error(err.Error())
 			}
 		case <-s.workerTicker.C: // Activate periodically
@@ -170,7 +170,7 @@ func (s *AkamaiAgent) WorkerThread() {
 				log.Error(err.Error())
 			}
 
-			if err := s.FetchAndSyncDomains(nil); err != nil {
+			if err := s.FetchAndSyncDomains(nil, false); err != nil {
 				log.Error(err.Error())
 			}
 
diff --git a/internal/driver/akamai/domain.go b/internal/driver/akamai/domain.go
index 0be7dc9b..62ed6a40 100644
--- a/internal/driver/akamai/domain.go
+++ b/internal/driver/akamai/domain.go
@@ -50,7 +50,7 @@ func (s *AkamaiAgent) EnsureDomain(domainType string) error {
 	return nil
 }
 
-func (s *AkamaiAgent) FetchAndSyncDomains(domains []string) error {
+func (s *AkamaiAgent) FetchAndSyncDomains(domains []string, force bool) error {
 	if s.executing {
 		return nil
 	}
@@ -58,13 +58,13 @@ func (s *AkamaiAgent) FetchAndSyncDomains(domains []string) error {
 	s.executing = true
 	defer func() { s.executing = false }()
 
-	log.Debugf("Running FetchAndSyncDomains(domains=%+v)", domains)
+	log.Debugf("Running FetchAndSyncDomains(domains=%+v, force=%t)", domains, force)
 	response, err := s.rpc.GetDomains(context.Background(), &server.SearchRequest{
 		Provider:       "akamai",
 		PageNumber:     0,
 		ResultPerPage:  1,
 		FullyPopulated: true,
-		Pending:        domains == nil,
+		Pending:        domains == nil && !force,
 		Ids:            domains,
 	})
 	if err != nil {
diff --git a/internal/driver/akamai/property.go b/internal/driver/akamai/property.go
index 8e0727db..33267c6c 100644
--- a/internal/driver/akamai/property.go
+++ b/internal/driver/akamai/property.go
@@ -131,6 +131,13 @@ MEMBERLOOP:
 			driver.GetProvisioningStatusRequest(member.Id, "MEMBER", "ACTIVE"))
 	}
 
+	// Due to the lack of individual liveness tests per member, we have to replicate a monitor per unique member port
+	// Collect unique member ports
+	uniquePorts := make(map[uint32]interface{})
+	for _, member := range members {
+		uniquePorts[member.GetPort()] = nil
+	}
+
 	// Add new Monitors
 	for _, monitor := range monitors {
 		if monitor.ProvisioningStatus == models.MonitorProvisioningStatusPENDINGDELETE {
@@ -139,45 +146,43 @@ MEMBERLOOP:
 			continue
 		}
 
-		livenessTest := gtm.LivenessTest{
-			Name:               monitor.GetId(),
-			TestObjectProtocol: MONITOR_LIVENESS_TYPE_MAP[monitor.GetType()],
-			TestInterval:       int(monitor.GetInterval()),
-			TestTimeout:        float32(monitor.GetTimeout()),
-			Disabled:           !monitor.GetAdminStateUp(),
-		}
-
-		switch monitor.GetType() {
-		case rpcmodels.Monitor_HTTPS:
-			fallthrough
-		case rpcmodels.Monitor_HTTP:
-			if monitor.GetSend() == "" {
-				livenessTest.TestObject = "/"
-			} else {
-				livenessTest.TestObject = monitor.GetSend()
-			}
-			var testPort uint32 = 80
-			for _, member := range members {
-				testPort = member.GetPort()
-				break
+	monitorLoop:
+		for testPort := range uniquePorts {
+			livenessTest := gtm.LivenessTest{
+				Name:               fmt.Sprintf("%s-%d", monitor.GetId(), testPort),
+				TestObjectPort:     int(testPort),
+				TestObjectProtocol: MONITOR_LIVENESS_TYPE_MAP[monitor.GetType()],
+				TestInterval:       int(monitor.GetInterval()),
+				TestTimeout:        float32(monitor.GetTimeout()),
+				Disabled:           !monitor.GetAdminStateUp(),
 			}
-			livenessTest.TestObjectPort = int(testPort)
-			livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domain.GetFqdn()}}
-			if domainName := monitor.GetDomainName(); domainName != "" {
-				livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domainName}}
+
+			switch monitor.GetType() {
+			case rpcmodels.Monitor_HTTPS:
+				fallthrough
+			case rpcmodels.Monitor_HTTP:
+				if monitor.GetSend() == "" {
+					livenessTest.TestObject = "/"
+				} else {
+					livenessTest.TestObject = monitor.GetSend()
+				}
+				livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domain.GetFqdn()}}
+				if domainName := monitor.GetDomainName(); domainName != "" {
+					livenessTest.HTTPHeaders = []*gtm.HTTPHeader{{Name: "Host", Value: domainName}}
+				}
+				livenessTest.HTTPMethod = swag.String(monitor.GetMethod().String())
+			case rpcmodels.Monitor_TCP:
+				livenessTest.RequestString = monitor.GetSend()
+				livenessTest.ResponseString = monitor.GetReceive()
+			default:
+				// unsupported type
+				log.Warnf("Unsupported monitor type: %s", monitor.GetType())
+				provRequests = append(provRequests,
+					driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusERROR))
+				continue monitorLoop
 			}
-			livenessTest.HTTPMethod = swag.String(monitor.GetMethod().String())
-		case rpcmodels.Monitor_TCP:
-			livenessTest.RequestString = monitor.GetSend()
-			livenessTest.ResponseString = monitor.GetReceive()
-		default:
-			// unsupported type
-			log.Warnf("Unsupported monitor type: %s", monitor.GetType())
-			provRequests = append(provRequests,
-				driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusERROR))
-			continue
+			property.LivenessTests = append(property.LivenessTests, &livenessTest)
 		}
-		property.LivenessTests = append(property.LivenessTests, &livenessTest)
 		provRequests = append(provRequests,
 			driver.GetProvisioningStatusRequest(monitor.Id, "MONITOR", models.MonitorProvisioningStatusACTIVE))
 	}
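The `domain.go` change ties the new `force` flag into the `Pending` filter of the `GetDomains` RPC: the notification-driven path passes explicit domain IDs with `force=true`, while the periodic ticker passes `nil` with `force=false`, so only the latter restricts the query to pending domains. A minimal table-driven test sketch (hypothetical, not part of the patch) pinning down that predicate:

```go
package akamai

import "testing"

// TestPendingPredicate is a hypothetical sketch: only the periodic,
// non-forced sweep (nil domain list, force=false) should restrict the
// GetDomains RPC to pending domains.
func TestPendingPredicate(t *testing.T) {
	pending := func(domains []string, force bool) bool {
		return domains == nil && !force // predicate from FetchAndSyncDomains
	}

	cases := []struct {
		name    string
		domains []string
		force   bool
		want    bool
	}{
		{"periodic sweep (ticker)", nil, false, true},
		{"forced full sync", nil, true, false},
		{"targeted sync (notification)", []string{"domain-1"}, true, false},
	}
	for _, c := range cases {
		if got := pending(c.domains, c.force); got != c.want {
			t.Errorf("%s: pending = %t, want %t", c.name, got, c.want)
		}
	}
}
```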
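The `property.go` change is the core of the patch: since a GTM liveness test cannot target individual members, each monitor is now replicated once per distinct member port, with a per-port test name instead of the previous single test pinned to the first member's port. A standalone sketch of that scheme (identifiers hypothetical; the patch uses `map[uint32]interface{}` where `map[uint32]struct{}` shown here is the conventional zero-byte set):

```go
package main

import "fmt"

func main() {
	// Member ports as they might arrive from the RPC layer (with duplicates).
	memberPorts := []uint32{80, 8080, 80, 443}

	// Collect unique member ports; struct{} values make the map a plain set.
	uniquePorts := make(map[uint32]struct{})
	for _, p := range memberPorts {
		uniquePorts[p] = struct{}{}
	}

	// One liveness test per unique port, named "<monitor-id>-<port>" so the
	// replicas do not collide in GTM.
	monitorID := "mon-0f0e6b1a" // hypothetical monitor ID
	for port := range uniquePorts {
		name := fmt.Sprintf("%s-%d", monitorID, port)
		fmt.Printf("would create liveness test %q on port %d\n", name, port)
	}
}
```

Map iteration order is unspecified in Go, which is harmless here because each port yields an independently named test.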
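One subtlety worth flagging: `monitorLoop:` labels the inner per-port loop, and a `switch` does not capture `continue` in Go, so `continue monitorLoop` in the `default:` case behaves like a bare `continue` and only advances to the next port; the monitor still reaches the ACTIVE status append after the loop, whereas the pre-patch bare `continue` skipped it by advancing the outer monitor loop. A toy illustration (names hypothetical); if skipping the ACTIVE transition for unsupported types was intended, the label would need to sit on the outer loop instead:

```go
package main

import "fmt"

func main() {
	monitors := []string{"tcp-mon", "udp-mon"} // "udp-mon" stands in for an unsupported type
	ports := []int{80, 443}

	for _, m := range monitors {
	portLoop:
		for _, p := range ports {
			if m == "udp-mon" {
				// Equivalent to a bare continue: moves to the next *port*,
				// not the next monitor.
				continue portLoop
			}
			fmt.Printf("replicated %s on port %d\n", m, p)
		}
		// Runs for every monitor, including the unsupported one.
		fmt.Printf("%s marked ACTIVE\n", m)
	}
}
```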