diff --git a/src/homepageExperience/components/steps/java/ExecuteQuerySql.tsx b/src/homepageExperience/components/steps/java/ExecuteQuerySql.tsx index be91f98c1f..40ede60b22 100644 --- a/src/homepageExperience/components/steps/java/ExecuteQuerySql.tsx +++ b/src/homepageExperience/components/steps/java/ExecuteQuerySql.tsx @@ -19,29 +19,103 @@ FROM 'census' WHERE time >= now() - interval '1 hour' AND ('bees' IS NOT NULL OR 'ants' IS NOT NULL) order by time asc` - const query = `String sql = "SELECT * " + - "FROM 'census' " + - "WHERE time >= now() - interval '1 hour' " + - "AND ('bees' IS NOT NULL OR 'ants' IS NOT NULL) order by time asc"; + const query = ` String sql = "SELECT * " + + "FROM 'census' " + + "WHERE time >= now() - interval '5 minutes' " + + "AND ('$species1' IS NOT NULL OR '$species2' IS NOT NULL) order by time asc"; -System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n", "ants", "bees", "location", "time"); -try (Stream stream = client.query(sql, new QueryOptions("${bucket}", QueryType.SQL))) { - stream.forEach(row -> System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n", row[0], row[1], row[2], row[3])); -} + System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n", "ants", "bees", "location", "time"); + + try (Stream ps = client.queryPoints(sql, + Map.of( + "species1", "bees", + "species2", "ants"), // Set Query Parameters + new QueryOptions("${bucket}", QueryType.SQL))) { // Set Query Options + ps.forEach(pv -> + System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n", + IntOrDefault(pv, "ants", 0), + IntOrDefault(pv,"bees", 0), + pv.getTag("location"), + InstantTime(pv, Instant.ofEpochSecond(0)))); + } ` const queryPreview = `| ants | bees | location | time | -| null | 23 | Klamath | 2023-06-02T10:21:21.083529279 | -| 30 | null | Portland | 2023-06-02T10:21:22.276295461 | -| null | 28 | Klamath | 2023-06-02T10:21:23.462901032 | -| 32 | null | Portland | 2023-06-02T10:21:24.608998154 | -| null | 29 | Klamath | 2023-06-02T10:21:25.762346305 | -| 40 | null | Portland | 2023-06-02T10:21:26.901005154 | +| 0 | 23 | Klamath | 2024-12-18T15:58:07.275779579Z | +| 30 | 0 | Portland | 2024-12-18T15:58:08.275779579Z | +| 0 | 28 | Klamath | 2024-12-18T15:58:09.275779579Z | +| 32 | 0 | Portland | 2024-12-18T15:58:10.275779579Z | +| 0 | 29 | Klamath | 2024-12-18T15:58:11.275779579Z | +| 40 | 0 | Portland | 2024-12-18T15:58:12.275779579Z | +` + + const staticHelpers = ` private static long IntOrDefault(final PointValues pointValues, + final String key, + final long defaultValue){ + Long result = pointValues.getIntegerField(key); + return result == null ? defaultValue : result; + } + + private static Instant InstantTime(final PointValues pointValues, + final Instant replacement){ + Number raw = pointValues.getTimestamp(); + if(raw == null) { + if (replacement == null){ + return Instant.ofEpochSecond(0); + } + return replacement; + } + long stamp = raw.longValue(); + long sec = stamp / 1000000000; + long nanos = stamp % 1000000000; + return Instant.ofEpochSecond(sec, nanos); + } ` return ( <>

Execute a SQL Query

+

+          The query transport makes use of Apache Arrow Flight to shorten
+          processing time. When executing queries, Arrow needs access to internal
+          JVM resources, which requires setting the following JVM argument:
+          --add-opens=java.base/java.nio=ALL-UNNAMED

+

Java

+

This can be done with an environment variable:

+ +
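A sketch of what that environment variable could look like (the snippet itself is not captured in this diff; JDK_JAVA_OPTIONS, read by the java launcher on JDK 9+, is an assumption about which variable the original used):

    export JDK_JAVA_OPTIONS="--add-opens=java.base/java.nio=ALL-UNNAMED"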

Maven

+

+          This argument can also be added to MAVEN_OPTS:

+ +
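A sketch of the MAVEN_OPTS form, assuming a POSIX shell (the original snippet is not captured in this diff):

    export MAVEN_OPTS="--add-opens=java.base/java.nio=ALL-UNNAMED"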

Gradle

+

+          With Gradle, this can be added to the build file, e.g. in
+          build.gradle.kts:

+ +
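A minimal build.gradle.kts sketch, assuming the application plugin is used to run the example (the original snippet is not captured in this diff):

    application {
        // pass the required flag to the JVM started by `gradle run`
        applicationDefaultJvmArgs = listOf("--add-opens=java.base/java.nio=ALL-UNNAMED")
    }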

+          Now let's query the data we wrote into the database with SQL. Here is
+          what our query looks like on its own:
@@ -57,11 +131,27 @@ try (Stream stream = client.query(sql, new QueryOptions("${bucket}", Q
           with a "census" measurement and either "bees" or "ants" fields.

-          Let's use that SQL query in our Java code to show us the
-          results of what we have written.
+          The client API will pass through null values for mismatched or missing
+          tags, fields, and timestamps. In anticipation of this possibility, copy
+          the following static helper methods to the bottom of the
+          InfluxClientExample class.

+ +
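To illustrate why the helpers are needed, here is a short sketch of the pattern they wrap (pv stands in for one PointValues row returned by queryPoints; the variable names are illustrative only and the helper code itself is in the staticHelpers string earlier in this diff):

    // getIntegerField returns null when the row has no value for the field,
    // so printing it directly would show the literal text "null".
    Long ants = pv.getIntegerField("ants");
    long antCount = (ants == null) ? 0 : ants; // fall back to a default, as IntOrDefault does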

+          Now let's use the model SQL query in our Java code to show us the
+          results of what we have written. Furthermore, let's use the SQL query
+          parameters feature of the client library to make query calls more
+          dynamic.

-          Add the following code to the WriteQueryExample class:
+          The following code replaces the fixed values of "bees" and "ants" with
+          the parameters $species1 and $species2. Add it to the
+          InfluxClientExample class after the write code added in the
+          previous step:
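The parameterized query itself is defined in the query template string earlier in this diff, but the markup stripped its generic type parameters. Here is the same code sketched with them restored, assuming PointValues, Map, and Instant are imported alongside the imports shown in the initialize step and that database holds the bucket name from the write step:

    String sql = "SELECT * " +
            "FROM 'census' " +
            "WHERE time >= now() - interval '5 minutes' " +
            "AND ('$species1' IS NOT NULL OR '$species2' IS NOT NULL) order by time asc";

    System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n", "ants", "bees", "location", "time");

    // queryPoints streams PointValues rows; the Map supplies the named query parameters
    try (Stream<PointValues> ps = client.queryPoints(sql,
            Map.of("species1", "bees", "species2", "ants"),
            new QueryOptions(database, QueryType.SQL))) {
        ps.forEach(pv ->
                System.out.printf("| %-5s | %-5s | %-8s | %-30s |%n",
                        IntOrDefault(pv, "ants", 0),
                        IntOrDefault(pv, "bees", 0),
                        pv.getTag("location"),
                        InstantTime(pv, Instant.ofEpochSecond(0))));
    }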

diff --git a/src/homepageExperience/components/steps/java/InitializeClientSql.tsx b/src/homepageExperience/components/steps/java/InitializeClientSql.tsx index b6850341eb..98515047c6 100644 --- a/src/homepageExperience/components/steps/java/InitializeClientSql.tsx +++ b/src/homepageExperience/components/steps/java/InitializeClientSql.tsx @@ -20,12 +20,12 @@ import java.time.Instant; import java.util.stream.Stream; import com.influxdb.v3.client.InfluxDBClient; +import com.influxdb.v3.client.Point; import com.influxdb.v3.client.query.QueryOptions; import com.influxdb.v3.client.query.QueryType; -import com.influxdb.v3.client.write.Point; import com.influxdb.v3.client.write.WriteOptions; -public final class WriteQueryExample { +public final class InfluxClientExample { public static void main(final String[] args) throws Exception { String hostUrl = "${url}"; diff --git a/src/homepageExperience/components/steps/java/InstallDependenciesSql.tsx b/src/homepageExperience/components/steps/java/InstallDependenciesSql.tsx index 2bfc5857cd..2eaac9c664 100644 --- a/src/homepageExperience/components/steps/java/InstallDependenciesSql.tsx +++ b/src/homepageExperience/components/steps/java/InstallDependenciesSql.tsx @@ -10,12 +10,12 @@ export const InstallDependenciesSql: FC = () => { } const mavenDependency = ` - com.influxdb - influxdb3-java - 0.2.0 -` + com.influxdb + influxdb3-java + 1.0.0 + ` const gradleDependency = `dependencies { - implementation "com.influxdb:influxdb3-java:0.2.0" + implementation("com.influxdb:influxdb3-java:1.0.0") }` return ( <> diff --git a/src/homepageExperience/components/steps/java/WriteDataSql.tsx b/src/homepageExperience/components/steps/java/WriteDataSql.tsx index 29b4b062dc..65fb142ed7 100644 --- a/src/homepageExperience/components/steps/java/WriteDataSql.tsx +++ b/src/homepageExperience/components/steps/java/WriteDataSql.tsx @@ -49,36 +49,57 @@ export const WriteDataSqlComponent = (props: OwnProps) => { onSelectBucket(bucket.name) }, [bucket, onSelectBucket]) - const codeSnippet = `String database = "${bucket.name}"; + const internalClass = ` static class CensusRecord { + String location; + String species; + int count; -Point[] points = new Point[] { - Point.measurement("census") - .addTag("location", "Klamath") - .addField("bees", 23), - Point.measurement("census") - .addTag("location", "Portland") - .addField("ants", 30), - Point.measurement("census") - .addTag("location", "Klamath") - .addField("bees", 28), - Point.measurement("census") - .addTag("location", "Portland") - .addField("ants", 32), - Point.measurement("census") - .addTag("location", "Klamath") - .addField("bees", 29), - Point.measurement("census") - .addTag("location", "Portland") - .addField("ants", 40) -}; + public CensusRecord(String location, String species, int count) { + this.location = location; + this.species = species; + this.count = count; + } -for (Point point : points) { - client.writePoint(point, new WriteOptions.Builder().database(database).build()); - - Thread.sleep(1000); // separate points by 1 second -} + public String getLocation() { + return location; + } + + public String getSpecies() { + return species; + } + + public int getCount() { + return count; + } + }` + + const dataPrep = ` String database = "${bucket.name}"; + + List records = Arrays.asList( + new CensusRecord("Klamath", "bees", 23), + new CensusRecord("Portland", "ants", 30), + new CensusRecord("Klamath", "bees", 28), + new CensusRecord("Portland", "ants", 32), + new CensusRecord("Klamath", "bees", 29), + new 
CensusRecord("Portland", "ants", 40) + ); + + List points = new ArrayList<>(); -System.out.println("Complete. Return to the InfluxDB UI."); + Instant stamp = Instant.now().minusSeconds(records.size()); + + for (CensusRecord record : records) { + points.add(Point.measurement("census") + .setTag("location", record.getLocation()) + .setIntegerField(record.getSpecies(), record.getCount()) + .setTimestamp(stamp) + ); + stamp = stamp.plusSeconds(1); + }` + + const codeSnippet = ` client.writePoints(points, new WriteOptions.Builder().database(database).build()); + + System.out.println("Complete. Return to the InfluxDB UI."); ` return ( @@ -239,7 +260,28 @@ System.out.println("Complete. Return to the InfluxDB UI."); let's write this data into our bucket.

-          Add the following code to the WriteQueryExample class:
+          First, add the following internal class to the
+          InfluxClientExample class, above the main
+          method:

+ +

+          Then, copy the following data preparation statements into the
+          main method, above the
+          try (InfluxDBClient ...) block:

+ +

+          Finally, add the following code within the
+          try (InfluxDBClient ...) block:
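As a rough sketch of where that code sits, assuming hostUrl and authToken are the variables set up in the initialize step and points is the list built in the data preparation step (the getInstance call shown here is an assumption about the surrounding, unchanged code):

    try (InfluxDBClient client = InfluxDBClient.getInstance(hostUrl, authToken, database)) {
        // write all prepared points in one call, targeting the chosen database
        client.writePoints(points, new WriteOptions.Builder().database(database).build());
        System.out.println("Complete. Return to the InfluxDB UI.");
    }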