TestPR #28

Closed
wants to merge 16 commits
2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -0,0 +1,2 @@
# All owners
* @pinterest/logging
16 changes: 16 additions & 0 deletions .github/workflows/maven.yml
@@ -0,0 +1,16 @@
name: PSC-Java Build

on: [pull_request]

jobs:
  build:

    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v1
    - name: Set up JDK 1.8
      uses: actions/setup-java@v1
      with:
        java-version: 1.8
    - name: Build with Maven
      run: mvn -B package --file pom.xml
2 changes: 1 addition & 1 deletion README.md
@@ -19,7 +19,7 @@ Some highlights of PSC include:

***PSC is currently under active development.***

PSC currently supports [Apache Kafka](https://github.com/apache/kafka) and [MemQ](https://github.com/pinterest/memq) PubSub systems in Java, with support for more languages and PubSub systems coming soon.
PSC currently supports [Apache Kafka](https://github.com/apache/kafka) and [MemQ](https://github.com/pinterest/memq) PubSub systems in Java, with support for more languages coming soon. Contributions to add support for other PubSub systems are welcome!

## Compatibility Matrix

@@ -39,7 +39,6 @@
import com.pinterest.psc.producer.PscProducer;
import com.pinterest.psc.producer.PscProducerMessage;
import com.pinterest.psc.producer.PscProducerTransactionalProperties;
import com.pinterest.psc.producer.kafka.KafkaProducerTransactionalProperties;
import com.pinterest.psc.serde.ByteArraySerializer;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.annotation.Internal;
@@ -21,9 +21,9 @@
import com.pinterest.psc.producer.PscBackendProducer;
import com.pinterest.psc.producer.PscProducer;
import com.pinterest.psc.producer.PscProducerMessage;
import com.pinterest.psc.producer.PscProducerTransactionalProperties;
import com.pinterest.psc.producer.PscProducerUtils;
import com.pinterest.psc.producer.creation.PscKafkaProducerCreator;
import com.pinterest.psc.producer.kafka.KafkaProducerTransactionalProperties;
import com.pinterest.psc.serde.ByteArraySerializer;
import com.pinterest.psc.serde.IntegerDeserializer;
import com.pinterest.psc.serde.IntegerSerializer;
@@ -434,7 +434,7 @@ public void testInitTransactions() throws ConfigurationException, ProducerExcept
Collection<PscBackendProducer> backendProducers = PscProducerUtils.getBackendProducersOf(pscProducer);
assertEquals(1, backendProducers.size());
PscBackendProducer backendProducer = backendProducers.iterator().next();
KafkaProducerTransactionalProperties transactionalProperties = (KafkaProducerTransactionalProperties) backendProducer.getTransactionalProperties();
PscProducerTransactionalProperties transactionalProperties = backendProducer.getTransactionalProperties();
long producerId = transactionalProperties.getProducerId();
assertEquals(0, transactionalProperties.getEpoch());

@@ -450,7 +450,7 @@ public void testInitTransactions() throws ConfigurationException, ProducerExcept
backendProducers = PscProducerUtils.getBackendProducersOf(pscProducer2);
assertEquals(1, backendProducers.size());
backendProducer = backendProducers.iterator().next();
transactionalProperties = (KafkaProducerTransactionalProperties) backendProducer.getTransactionalProperties();
transactionalProperties = backendProducer.getTransactionalProperties();
// it should bump the epoch each time for the same producer id
assertEquals(producerId, transactionalProperties.getProducerId());
assertEquals(1, transactionalProperties.getEpoch());
@@ -463,7 +463,7 @@ public void testInitTransactions() throws ConfigurationException, ProducerExcept
backendProducers = PscProducerUtils.getBackendProducersOf(pscProducer3);
assertEquals(1, backendProducers.size());
backendProducer = backendProducers.iterator().next();
transactionalProperties = (KafkaProducerTransactionalProperties) backendProducer.getTransactionalProperties();
transactionalProperties = backendProducer.getTransactionalProperties();
assertEquals(producerId, transactionalProperties.getProducerId());
assertEquals(2, transactionalProperties.getEpoch());
pscProducer3.abortTransaction();
64 changes: 36 additions & 28 deletions psc/src/main/java/com/pinterest/psc/common/PscUtils.java
@@ -1,16 +1,13 @@
package com.pinterest.psc.common;

import com.pinterest.psc.exception.startup.ConfigurationException;
import com.pinterest.psc.exception.startup.ServiceDiscoveryException;
import com.pinterest.psc.logging.PscLogger;
import software.amazon.awssdk.core.SdkSystemSetting;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
@@ -35,35 +32,36 @@ public static <T> T instantiateFromClass(String fqdn, Class<T> targetClass) thro
}

public static boolean isEc2Host() {
return doesEc2MetadataExist() || isSysVendorAws() || isAwsOsDetected();
}

protected static boolean doesEc2MetadataExist() {
try {
String hostAddressForEC2MetadataService = SdkSystemSetting.AWS_EC2_METADATA_SERVICE_ENDPOINT.getStringValueOrThrow();
if (hostAddressForEC2MetadataService == null)
return false;
URL url = new URL(hostAddressForEC2MetadataService + "/latest/dynamic/instance-identity/document");
HttpURLConnection con = (HttpURLConnection) url.openConnection();
con.setRequestMethod("GET");
con.setConnectTimeout(1000);
con.setReadTimeout(1000);
con.connect();
con.disconnect();
return con.getResponseCode() == 200;
} catch (ConnectException connectException) {
return isEc2HostAlternate();
} catch (Exception exception) {
logger.warn("Error occurred when determining the host type.", new ServiceDiscoveryException(exception));
ProcessBuilder processBuilder = new ProcessBuilder("ec2metadata");
processBuilder.redirectErrorStream(true);
Process process = processBuilder.start();
return process.waitFor() == 0;
} catch (Exception e) {
logger.info("Could not detect if host is EC2 from ec2metadata.", e);
return false;
}
}

protected static boolean isEc2HostAlternate() {
ProcessBuilder processBuilder = new ProcessBuilder("ec2metadata");
processBuilder.redirectErrorStream(true);
protected static boolean isSysVendorAws() {
try {
return getFileContent("/sys/devices/virtual/dmi/id/sys_vendor").trim().equals("Amazon EC2");
} catch (Exception e) {
logger.info("Could not detect if host is EC2 from sys vendor.", e);
return false;
}
}

protected static boolean isAwsOsDetected() {
try {
Process process = processBuilder.start();
return process.waitFor() == 0;
} catch (IOException | InterruptedException e) {
logger.info("Error occurred when running the `ec2metadata` command. Will check OS version as last resort.");
return System.getProperty("os.version").contains("aws");
} catch (Exception e) {
logger.info("Could not detect if host is EC2 from os version.", e);
return false;
}
}

@@ -89,4 +87,14 @@ public static String getStackTraceAsString(Exception exception) {
exception.printStackTrace(printWriter);
return stringWriter.toString();
}
}

public static String getFileContent(String path) throws IOException {
StringBuilder content = new StringBuilder();
BufferedReader reader = new BufferedReader(new FileReader(path));
String line;
while ((line = reader.readLine()) != null) {
content.append(line).append(System.lineSeparator());
}
return content.toString();
}
}
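The rewritten `isEc2Host()` drops the HTTP probe of the EC2 instance-identity document (and the `SdkSystemSetting` dependency) in favor of three local checks, any one of which marks the host as EC2: the `ec2metadata` command, the DMI `sys_vendor` file, and the `os.version` system property. The sketch below is a self-contained approximation of that layered fallback for readers who want to try it outside PSC; the class and method names are illustrative and not part of this PR.

```java
// Standalone sketch of the layered detection isEc2Host() now performs;
// names here are illustrative, not PSC's code.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class Ec2DetectionSketch {

    public static boolean looksLikeEc2Host() {
        return ec2MetadataCliSucceeds() || sysVendorIsAmazon() || osVersionMentionsAws();
    }

    // Check 1: the `ec2metadata` CLI exits with 0 on hosts that ship it and can reach EC2 metadata.
    private static boolean ec2MetadataCliSucceeds() {
        try {
            Process process = new ProcessBuilder("ec2metadata").redirectErrorStream(true).start();
            return process.waitFor() == 0;
        } catch (IOException | InterruptedException e) {
            return false;
        }
    }

    // Check 2: on EC2 the DMI system-vendor file reports "Amazon EC2".
    private static boolean sysVendorIsAmazon() {
        try {
            String vendor = new String(
                    Files.readAllBytes(Paths.get("/sys/devices/virtual/dmi/id/sys_vendor"))).trim();
            return vendor.equals("Amazon EC2");
        } catch (IOException e) {
            return false;
        }
    }

    // Check 3: kernels built for AWS (e.g. Ubuntu's "-aws" kernels) carry "aws" in os.version.
    private static boolean osVersionMentionsAws() {
        return System.getProperty("os.version", "").contains("aws");
    }

    public static void main(String[] args) {
        System.out.println("EC2 host? " + looksLikeEc2Host());
    }
}
```

Each check degrades to `false` on error, so a non-EC2 host simply falls through all three checks, mirroring the logging-and-return-false behavior in the hunk above.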
12 changes: 12 additions & 0 deletions psc/src/main/java/com/pinterest/psc/common/kafka/KafkaErrors.java
@@ -24,6 +24,7 @@
import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.WakeupException;

import java.util.ConcurrentModificationException;
import java.util.LinkedHashMap;
import java.util.Map;

@@ -165,6 +166,17 @@ ImmutableMap.<Class<? extends Exception>, Map<String, PscErrorHandler.ConsumerAc
}}
)

// ConcurrentModificationException
.put(
ConcurrentModificationException.class,
new LinkedHashMap<String, PscErrorHandler.ConsumerAction>(1) {{
put(
"org.apache.kafka.common.metrics.JmxReporter.getMBeanName", // known case of CME - we will swallow it
new PscErrorHandler.ConsumerAction(PscErrorHandler.ActionType.NONE, ConsumerException.class)
);
}}
)

.build();

/**
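The new entry tells PSC's error handling to take no action when a `ConcurrentModificationException` originates from `org.apache.kafka.common.metrics.JmxReporter.getMBeanName`, a known benign case noted in the comment above. The handler's actual lookup logic is not part of this diff; the sketch below only illustrates one plausible way a map keyed by exception class and originating frame can be consulted, with made-up `ActionType` names.

```java
// Hypothetical illustration (not PSC's handler) of consulting a per-frame exception map.
import java.util.ConcurrentModificationException;
import java.util.LinkedHashMap;
import java.util.Map;

public class ErrorActionLookupSketch {

    enum ActionType { NONE, THROW }

    // exception class -> ("declaringClass.methodName" -> action)
    static final Map<Class<? extends Exception>, Map<String, ActionType>> ACTIONS = new LinkedHashMap<>();

    static {
        Map<String, ActionType> cmeActions = new LinkedHashMap<>();
        // Known benign case: a CME thrown from JmxReporter.getMBeanName is swallowed.
        cmeActions.put("org.apache.kafka.common.metrics.JmxReporter.getMBeanName", ActionType.NONE);
        ACTIONS.put(ConcurrentModificationException.class, cmeActions);
    }

    static ActionType actionFor(Exception e) {
        Map<String, ActionType> byFrame = ACTIONS.get(e.getClass());
        if (byFrame == null) {
            return ActionType.THROW; // unmapped exceptions propagate
        }
        // Match the first stack frame that has a registered action for this exception type.
        for (StackTraceElement frame : e.getStackTrace()) {
            ActionType action = byFrame.get(frame.getClassName() + "." + frame.getMethodName());
            if (action != null) {
                return action;
            }
        }
        return ActionType.THROW;
    }
}
```

In the real mapping above, the value is a `PscErrorHandler.ConsumerAction` with `ActionType.NONE`, i.e. the exception is swallowed rather than surfaced to the consumer.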
@@ -89,6 +89,11 @@ public class PscConfiguration extends PropertiesConfiguration {
*/
public final static String PCS_AUTO_RESOLUTION_RETRY_COUNT = "psc.auto.resolution.retry.count";

/**
* Whether to proactively reset consumer or producer based on approaching SSL certificate expiry
*/
public final static String PSC_PROACTIVE_SSL_RESET_ENABLED = "psc.proactive.ssl.reset.enabled";

private final static String PSC_CLIENT_TYPE = "psc.client.type";
public final static String PSC_CLIENT_TYPE_CONSUMER = "consumer";
public final static String PSC_CLIENT_TYPE_PRODUCER = "producer";
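`psc.proactive.ssl.reset.enabled` is the opt-in switch for the proactive client reset wired through `PscConfigurationInternal` and the Kafka consumer below; it defaults to `false`. A minimal sketch of turning it on programmatically, assuming `PscConfiguration` (from `com.pinterest.psc.config`) can be instantiated directly and configured through the `PropertiesConfiguration` setters it extends; the rest of the client setup is omitted.

```java
import com.pinterest.psc.config.PscConfiguration;

public class ProactiveSslResetExample {
    public static void main(String[] args) {
        PscConfiguration configuration = new PscConfiguration();
        // Opt in to proactive resets around SSL certificate expiry (defaults to false).
        configuration.setProperty(PscConfiguration.PSC_PROACTIVE_SSL_RESET_ENABLED, "true");
        // ... remaining consumer/producer properties would be set here before creating a client.
    }
}
```

The same flag can also be set in `psc.conf`, as the last hunk in this PR shows.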
@@ -53,6 +53,7 @@ public class PscConfigurationInternal {
private boolean autoResolutionEnabled;
private int autoResolutionRetryCount;
private MetricsReporterConfiguration metricsReporterConfiguration;
private boolean proactiveSslResetEnabled;

public PscConfigurationInternal() {
}
@@ -206,6 +207,10 @@ private void validateGenericConfiguration(Map<String, Exception> invalidConfigs)
Integer autoResolutionRetryCount = verifyConfigHasValue(pscConfiguration, PscConfiguration.PCS_AUTO_RESOLUTION_RETRY_COUNT, Integer.class, invalidConfigs);
this.autoResolutionRetryCount = autoResolutionRetryCount != null ? autoResolutionRetryCount : 5;
}

// SSL reset
Boolean proactiveSslResetEnabled = verifyConfigHasValue(pscConfiguration, PscConfiguration.PSC_PROACTIVE_SSL_RESET_ENABLED, Boolean.class, invalidConfigs);
this.proactiveSslResetEnabled = proactiveSslResetEnabled != null ? proactiveSslResetEnabled : false; // false by default
}

public void logConfiguration() {
@@ -733,6 +738,10 @@ public boolean isAutoResolutionEnabled() {
return autoResolutionEnabled;
}

public boolean isProactiveSslResetEnabled() {
return proactiveSslResetEnabled;
}

public int getAutoResolutionRetryCount() {
return autoResolutionRetryCount;
}
@@ -95,6 +95,7 @@ public void initializeBackend(ServiceDiscoveryConfig discoveryConfig, TopicUri t
properties, pscConfigurationInternal, Collections.singleton(topicUri));
logger.info("Initialized PscKafkaConsumer with SSL cert expiry time at " + sslCertificateExpiryTimeInMillis);
}
logger.info("Proactive SSL reset enabled: {}", pscConfigurationInternal.isProactiveSslResetEnabled());
}

@Override
@@ -1042,6 +1043,11 @@ protected void maybeResetBackendClient() throws ConsumerException {
// reset if SSL enabled && cert is expired
if (isSslEnabledInAnyActiveSusbcriptionOrAssignment &&
(System.currentTimeMillis() >= sslCertificateExpiryTimeInMillis)) {
if (!pscConfigurationInternal.isProactiveSslResetEnabled()) {
logger.info("Skipping reset of client even though SSL certificate is approaching expiry at {}" +
" because proactive reset is disabled", sslCertificateExpiryTimeInMillis);
return;
}
if (KafkaSslUtils.keyStoresExist(properties)) {
logger.info("Resetting backend Kafka client due to cert expiry at " +
sslCertificateExpiryTimeInMillis);
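With the flag off (the default), `maybeResetBackendClient()` now logs and returns instead of tearing down and recreating the backend Kafka client when the tracked certificate expiry time is reached. The condensed sketch below restates that gate as a pure function; the parameter names are placeholders and the surrounding consumer state is elided.

```java
// Condensed sketch of the decision added to maybeResetBackendClient();
// names outside this PR's diff are placeholders.
public class SslResetGateSketch {

    static boolean shouldResetBackendClient(boolean sslEnabled,
                                            long certExpiryTimeMillis,
                                            boolean proactiveSslResetEnabled,
                                            long nowMillis) {
        // Only consider a reset once the tracked certificate expiry time has been reached.
        if (!sslEnabled || nowMillis < certExpiryTimeMillis) {
            return false;
        }
        // New in this PR: without the opt-in flag, the client is left alone.
        return proactiveSslResetEnabled;
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        System.out.println(shouldResetBackendClient(true, now - 1_000, false, now)); // false: flag off
        System.out.println(shouldResetBackendClient(true, now - 1_000, true, now));  // true: flag on
    }
}
```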
7 changes: 0 additions & 7 deletions psc/src/main/java/com/pinterest/psc/producer/PscProducer.java
@@ -316,13 +316,6 @@ public Future<MessageId> send(PscProducerMessage<K, V> pscProducerMessage, Callb

Future<MessageId> future = backendProducer.send(pscProducerMessage, callback);

PscMetricRegistryManager.getInstance().incrementCounterMetric(
pscProducerMessage.getTopicUriPartition().getTopicUri(),
pscProducerMessage.getPartition(),
PscMetrics.PSC_PRODUCER_PRODUCE_MESSAGES_METRIC,
pscConfigurationInternal
);

return future;
}

@@ -2,6 +2,10 @@

import com.pinterest.psc.producer.PscProducerTransactionalProperties;

/**
* Moved to PscProducerTransactionalProperties
*/
@Deprecated
public class KafkaProducerTransactionalProperties extends PscProducerTransactionalProperties {

public KafkaProducerTransactionalProperties(long producerId, short epoch) {
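With the transactional-properties type now backend-agnostic, call sites no longer need to cast to the deprecated Kafka-specific subclass; the test changes earlier in this PR make exactly this switch. A before/after sketch, with the producer setup elided and illustrative variable names:

```java
import com.pinterest.psc.producer.PscBackendProducer;
import com.pinterest.psc.producer.PscProducerTransactionalProperties;

public class TransactionalPropertiesMigration {
    static void readTransactionalState(PscBackendProducer backendProducer) throws Exception {
        // Old: KafkaProducerTransactionalProperties props =
        //          (KafkaProducerTransactionalProperties) backendProducer.getTransactionalProperties();
        PscProducerTransactionalProperties props = backendProducer.getTransactionalProperties();
        long producerId = props.getProducerId();
        System.out.println("producerId=" + producerId + ", epoch=" + props.getEpoch());
    }
}
```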
@@ -716,25 +716,14 @@ public void resumeTransaction(PscBackendProducer otherBackendProducer) throws Pr
);
}

resumeTransaction(new KafkaProducerTransactionalProperties(producerId, epoch));
resumeTransaction(new PscProducerTransactionalProperties(producerId, epoch));
}

@Override
public void resumeTransaction(PscProducerTransactionalProperties pscProducerTransactionalProperties) throws ProducerException {
if (kafkaProducer == null)
handleUninitializedKafkaProducer("resumeTransaction()");

if (!(pscProducerTransactionalProperties instanceof KafkaProducerTransactionalProperties)) {
handleException(
new BackendProducerException(
"[Kafka] Unexpected producer transaction state type: " + pscProducerTransactionalProperties.getClass().getCanonicalName(),
PscUtils.BACKEND_TYPE_KAFKA
), true
);
}

KafkaProducerTransactionalProperties kafkaProducerTransactionalProperties = (KafkaProducerTransactionalProperties) pscProducerTransactionalProperties;

try {
Object transactionManager = PscCommon.getField(kafkaProducer, "transactionManager");
synchronized (kafkaProducer) {
@@ -752,8 +741,8 @@ public void resumeTransaction(PscProducerTransactionalProperties pscProducerTran
PscCommon.invoke(topicPartitionBookkeeper, "reset");

Object producerIdAndEpoch = PscCommon.getField(transactionManager, "producerIdAndEpoch");
PscCommon.setField(producerIdAndEpoch, "producerId", kafkaProducerTransactionalProperties.getProducerId());
PscCommon.setField(producerIdAndEpoch, "epoch", kafkaProducerTransactionalProperties.getEpoch());
PscCommon.setField(producerIdAndEpoch, "producerId", pscProducerTransactionalProperties.getProducerId());
PscCommon.setField(producerIdAndEpoch, "epoch", pscProducerTransactionalProperties.getEpoch());

PscCommon.invoke(
transactionManager,
@@ -785,7 +774,7 @@ public PscProducerTransactionalProperties getTransactionalProperties() throws Pr

Object transactionManager = PscCommon.getField(kafkaProducer, "transactionManager");
Object producerIdAndEpoch = PscCommon.getField(transactionManager, "producerIdAndEpoch");
return new KafkaProducerTransactionalProperties(
return new PscProducerTransactionalProperties(
(long) PscCommon.getField(producerIdAndEpoch, "producerId"),
(short) PscCommon.getField(producerIdAndEpoch, "epoch")
);
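`resumeTransaction` keeps working by reflecting into the Kafka producer's private `transactionManager` and overwriting `producerIdAndEpoch`, now fed directly from the generic `PscProducerTransactionalProperties`. The `PscCommon` helpers themselves are not shown in this diff; the sketch below is a generic approximation of what field-level reflection like `getField`/`setField` involves, not PSC's implementation.

```java
// Minimal reflection helpers approximating what PscCommon.getField / setField are used
// for above; an illustrative sketch, not PSC's implementation.
import java.lang.reflect.Field;

public class ReflectionFieldSketch {

    static Object getField(Object target, String name) throws ReflectiveOperationException {
        Field field = findField(target.getClass(), name);
        field.setAccessible(true);
        return field.get(target);
    }

    static void setField(Object target, String name, Object value) throws ReflectiveOperationException {
        Field field = findField(target.getClass(), name);
        field.setAccessible(true);
        field.set(target, value);
    }

    // Walk up the class hierarchy so private fields declared on superclasses are reachable.
    private static Field findField(Class<?> type, String name) throws NoSuchFieldException {
        for (Class<?> c = type; c != null; c = c.getSuperclass()) {
            try {
                return c.getDeclaredField(name);
            } catch (NoSuchFieldException ignored) {
                // keep looking in the superclass
            }
        }
        throw new NoSuchFieldException(name);
    }
}
```

With helpers like these, `setField(producerIdAndEpoch, "producerId", props.getProducerId())` would mirror the two `PscCommon.setField` calls in the hunk above.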
1 change: 1 addition & 0 deletions psc/src/main/resources/psc.conf
@@ -6,6 +6,7 @@ psc.config.topic.uri=

psc.auto.resolution.enabled=true
psc.auto.resolution.retry.count=5
psc.proactive.ssl.reset.enabled=false

#psc.metrics
#valid options com.pinterest.psc.metrics.NullMetricsReporter, com.pinterest.psc.metrics.OpenTSDBMetricsReporter