Remove format() calls for constant strings with no arguments
elharo authored and tdcmeehan committed Sep 26, 2024
1 parent 4bdd580 · commit 27d38bf
Showing 8 changed files with 9 additions and 10 deletions.
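Every hunk below applies the same mechanical change: a String.format() call whose template is a constant with no format specifiers and no arguments is replaced by the string literal itself. A minimal, self-contained sketch (illustrative only, not part of the commit) of why the two forms are equivalent and why the wrapper is worth dropping:

    import static java.lang.String.format;

    public class FormatNoArgsExample
    {
        public static void main(String[] args)
        {
            // With no trailing arguments, format() only parses the template and
            // returns an equal string, so the call is pure overhead and noise.
            String wrapped = format("Failed to compute empty projection");
            String direct = "Failed to compute empty projection";
            System.out.println(wrapped.equals(direct)); // prints: true

            // The wrapper is also a latent hazard: if the constant ever gains a
            // stray '%', format() throws at runtime instead of returning the text,
            // e.g. format("progress: 100%") throws UnknownFormatConversionException.
        }
    }

Calls that actually interpolate arguments, such as format("Unknown function implementation type: %s", implementationType) further down, are left untouched.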

@@ -36,7 +36,6 @@
import static com.facebook.presto.plugin.bigquery.BigQueryErrorCode.BIGQUERY_FAILED_TO_EXECUTE_QUERY;
import static com.google.cloud.bigquery.storage.v1beta1.Storage.ReadSession;
import static com.google.common.collect.ImmutableList.toImmutableList;
-import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
import static java.util.stream.IntStream.range;
@@ -131,7 +130,7 @@ private List<BigQuerySplit> createEmptyProjection(TableId tableId, int actualPar
return splits;
}
catch (BigQueryException e) {
-throw new PrestoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, format("Failed to compute empty projection"), e);
+throw new PrestoException(BIGQUERY_FAILED_TO_EXECUTE_QUERY, "Failed to compute empty projection", e);
}
}
}

@@ -82,7 +82,7 @@ private void loadSmooshFileMetadata()
BufferedReader in = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(metadata)));
String line = in.readLine();
if (line == null) {
-throw new PrestoException(DRUID_SEGMENT_LOAD_ERROR, format("Malformed metadata file: first line should be version,maxChunkSize,numChunks, got null."));
+throw new PrestoException(DRUID_SEGMENT_LOAD_ERROR, "Malformed metadata file: first line should be version,maxChunkSize,numChunks, got null.");
}

String[] splits = line.split(",");

@@ -331,7 +331,7 @@ protected ScalarFunctionImplementation sqlInvokedFunctionToImplementation(SqlInv
throw new IllegalStateException(
format("SqlInvokedFunction %s has BUILTIN implementation type but %s cannot manage BUILTIN functions", function.getSignature().getName(), this.getClass()));
case CPP:
-throw new IllegalStateException(format("Presto coordinator can not resolve implementation of CPP UDF functions"));
+throw new IllegalStateException("Presto coordinator can not resolve implementation of CPP UDF functions");
default:
throw new IllegalStateException(format("Unknown function implementation type: %s", implementationType));
}

@@ -3509,7 +3509,7 @@ protected Optional<TableEncryptionProperties> getTableEncryptionPropertiesFromTa
}

if (seenColumns.contains(columnWithSubfield.toString())) {
-throw new PrestoException(INVALID_TABLE_PROPERTY, format("The same column/subfield cannot have 2 encryption keys"));
+throw new PrestoException(INVALID_TABLE_PROPERTY, "The same column/subfield cannot have 2 encryption keys");
}

if (columnWithSubfield.getSubfieldPath().isPresent()) {

@@ -6395,7 +6395,7 @@ public void testAddTableConstraints()

// Negative tests
assertQueryFails(addPrimaryKeyStmt, format("Primary key already exists for: %s.%s", getSession().getSchema().get(), tableName));
-assertQueryFails(addUniqueConstraintStmt, format("Constraint already exists: 'uq3'"));
+assertQueryFails(addUniqueConstraintStmt, "Constraint already exists: 'uq3'");
String dropNonExistentConstraint = format("ALTER TABLE %s.%s.%s DROP CONSTRAINT missingconstraint", getSession().getCatalog().get(), getSession().getSchema().get(), tableName);
assertQueryFails(dropNonExistentConstraint, "Constraint 'missingconstraint' not found");


@@ -466,7 +466,7 @@ public static Optional<BiFunction<Integer, URI, Process>> getExternalWorkerLaunc
}
else {
Files.write(catalogDirectoryPath.resolve(format("%s.properties", catalogName)),
format("connector.name=hive").getBytes());
"connector.name=hive".getBytes());
}
// Add catalog with caching always enabled.
Files.write(catalogDirectoryPath.resolve(format("%scached.properties", catalogName)),

@@ -391,7 +391,7 @@ public int populateFromQueryResults(
throw new PinotException(
PINOT_UNEXPECTED_RESPONSE,
Optional.of(sql),
String.format("Expected data schema in the response"));
"Expected data schema in the response");
}
JsonNode columnDataTypes = dataSchema.get("columnDataTypes");
JsonNode columnNames = dataSchema.get("columnNames");

@@ -133,10 +133,10 @@ public void testZeroFileCreatorForBucketedTable()
{
assertUpdate(
getSession(),
format("CREATE TABLE hive.hive_test.test_hive_orders_bucketed_join_zero_file WITH (bucketed_by=array['orderkey'], bucket_count=8) AS " +
"CREATE TABLE hive.hive_test.test_hive_orders_bucketed_join_zero_file WITH (bucketed_by=array['orderkey'], bucket_count=8) AS " +
"SELECT orderkey, custkey, orderstatus, totalprice, orderdate, orderpriority, clerk, shippriority, comment " +
"FROM orders_bucketed " +
"WHERE orderkey = 1"),
"WHERE orderkey = 1",
1);
}
