Skip to content

Commit

Permalink
Merge pull request #165 from tinybit/add_oxla_db
Browse files Browse the repository at this point in the history
Added Oxla.com DB
  • Loading branch information
tinybit authored Feb 1, 2024
2 parents cd86d46 + c37a35b commit a7437ed
Show file tree
Hide file tree
Showing 5 changed files with 345 additions and 0 deletions.
123 changes: 123 additions & 0 deletions oxla/benchmark.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
#!/bin/bash

# ClickBench benchmark driver for Oxla (https://oxla.com).
# Provisions Docker, a local fake S3 endpoint (fakes3), downloads the
# ClickBench "hits" dataset, loads it into Oxla over the PostgreSQL wire
# protocol, then runs the queries via run.sh and prints per-query timings.
#
# Options are set here rather than in the shebang so they survive being
# invoked as `bash benchmark.sh`. Deliberately no `pipefail`: several
# cleanup pipelines (ps | grep) legitimately return non-zero when there
# is nothing to kill.
set -e

# cleanup: kill any leftover fakes3 / oxla processes from a previous run
sudo ps aux | grep fakes3 | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9
sudo ps aux | grep oxla | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9

# docker: (re)install the official Docker APT repository and engine.
# `rm -f` so a missing keyring on a fresh machine does not abort under -e.
sudo rm -f /usr/share/keyrings/docker-archive-keyring.gpg
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt update
sudo apt install -y docker-ce

# base packages
sudo apt-get install -y postgresql-client curl wget apt-transport-https ca-certificates software-properties-common gnupg2 parallel
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential

# ruby and fake S3 (fakes3 serves the dataset parts for Oxla's S3 ingest)
sudo apt install -y ruby-full
sudo gem install bundler fakes3 webrick sorted_set

# install aws cli tools. Remove any previous install first; `-f` so a clean
# machine (nothing to remove) does not abort the script under -e.
sudo rm -f /usr/local/bin/aws
sudo rm -f /usr/local/bin/aws_completer
sudo rm -rf /usr/local/aws-cli
sudo rm -rf ~/.aws/ aws

curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install --update
/usr/local/bin/aws --version
rm -f awscliv2.zip

# configure aws with dummy credentials (fakes3 does not validate them)
mkdir -p ~/.aws
echo -e "[default]\nregion = none" > ~/.aws/config
echo -e "[default]\naws_access_key_id = none\naws_secret_access_key = none" > ~/.aws/credentials

# run fake S3 on port 4569 with a fresh backing directory
sudo ps aux | grep fakes3 | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9
sudo rm -rf /mnt/fakes3_root
sudo mkdir -p /mnt/fakes3_root
sudo chmod a+rw /mnt/fakes3_root -R
fakes3 -r /mnt/fakes3_root -H 0.0.0.0 -p 4569 --license license.pdf > /dev/null 2>&1 &
sleep 10 # waiting for fakes3 to start listening

# download dataset
wget --no-verbose --continue 'https://datasets.clickhouse.com/hits_compatible/hits.tsv.gz'
gzip -d hits.tsv.gz
chmod 777 ~ hits.tsv

# convert dataset to csv (Oxla ingests only non-compressed CSV).
# NOTE(review): piping a remote script straight to `sh` is a supply-chain
# risk; acceptable only on a throwaway benchmark machine.
rm -f part_*.csv
curl https://clickhouse.com/ | sh
./clickhouse local --query "SELECT * FROM 'hits.tsv' INTO OUTFILE 'hits.csv'"
rm hits.tsv

# prepare digestable parts (5m rows each) of hits.csv
split -l 5000000 hits.csv part_
for file in part_*; do mv "$file" "${file}.csv"; done

# upload dataset (prepared parts) to fake S3 bucket
aws s3 mb s3://my-new-bucket --endpoint-url http://localhost:4569

for file in part_*.csv; do
echo "Processing file: $file"

# copy the file to the S3 bucket
aws s3 cp "./$file" s3://my-new-bucket --endpoint-url http://localhost:4569 > /dev/null 2>&1

# clean up temporary multipart objects left behind after the upload.
# printf + quoted var (instead of unquoted `echo $TMPPARTS`) so that keys
# are never glob-expanded by the shell before reaching `parallel`.
TMPPARTS=$(aws s3api list-objects --bucket my-new-bucket --query "Contents[?contains(Key, '_${file}_')].Key" --output text --endpoint-url http://localhost:4569)
printf '%s' "$TMPPARTS" | tr -s '[:space:]' '\n' | grep . | parallel -j16 aws s3api delete-object --bucket my-new-bucket --key {} --endpoint-url http://localhost:4569
done

# get and configure Oxla image
echo "Install and run Oxla."

sudo ps aux | grep oxla | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9

sudo docker run --rm -p 5432:5432 --name oxlacontainer public.ecr.aws/oxla/release:latest > /dev/null 2>&1 &
sleep 10 # waiting for container start and db initialisation (leader election, etc.)

# point Oxla's S3 endpoint at the local fakes3, then freeze the configured
# container as a reusable image
sudo docker exec oxlacontainer /bin/bash -c "sed -i 's#endpoint: \"\"#endpoint: \"http://localhost:4569\"#g' oxla/default_config.yml"
sudo docker exec oxlacontainer /bin/bash -c "sed -i 's#endpoint:.*#endpoint: '\''http://localhost:4569'\''#g' oxla/startup_config/config.yml"
sudo docker rmi oxla-configured-image:latest > /dev/null 2>&1 || true # ok if the image does not exist yet
sudo docker commit oxlacontainer oxla-configured-image

sudo ps aux | grep oxla | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9

# run oxla (--net=host so the container can reach fakes3 on localhost:4569)
sudo docker run --rm --net=host --name oxlacontainer oxla-configured-image > /dev/null 2>&1 &
sleep 10 # waiting for container start and db initialisation (leader election, etc.)

# create table and ingest data
export PGCLIENTENCODING=UTF8
psql -h localhost -p 5432 -U postgres -t -c 'CREATE SCHEMA test'
psql -h localhost -p 5432 -U postgres -d test -t < create.sql

for file in part_*.csv; do
echo "Processing file: $file"
psql -h localhost -p 5432 -U postgres -d test -t -c '\timing' -c "COPY hits FROM 's3://my-new-bucket/$file';"
aws s3api delete-object --bucket my-new-bucket --key "$file" --endpoint-url http://localhost:4569
done

# get ingested data size
echo "data size after ingest:"
sudo docker exec oxlacontainer /bin/bash -c "du -s oxla/data"

# wait for merges to finish
sleep 30

# kill fake S3 and remove its data
ps aux | grep fakes3 | grep -v grep | awk '{print $2}' | sudo xargs -r kill -9
sudo rm -rf /mnt/fakes3_root

# run benchmark
echo "running benchmark..."
./run.sh 2>&1 | tee log.txt

# format results: extract each 'Time: <ms>' line, convert to seconds, and
# group into [t1,t2,t3] triples — one JSON-ish row per query
cat log.txt | grep -oP 'Time: \d+\.\d+ ms' | sed -r -e 's/Time: ([0-9]+\.[0-9]+) ms/\1/' |
awk '{ if (i % 3 == 0) { printf "[" }; printf $1 / 1000; if (i % 3 != 2) { printf "," } else { print "]," }; ++i; }'
108 changes: 108 additions & 0 deletions oxla/create.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
-- ClickBench "hits" table for Oxla, executed by benchmark.sh via
-- `psql ... -d test -t < create.sql`.
--
-- Column names and order must match the ClickBench CSV layout exactly:
-- benchmark.sh loads the data with a column-less `COPY hits FROM 's3://...'`,
-- which maps CSV fields to table columns positionally. The historical
-- misspellings (TraficSourceID, IsArtifical) are part of the shared
-- ClickBench schema and must be kept as-is.
CREATE TABLE hits
(
    WatchID BIGINT NOT NULL,
    JavaEnable INT NOT NULL,
    Title TEXT NOT NULL,
    GoodEvent INT NOT NULL,
    EventTime TIMESTAMP NOT NULL,
    EventDate Date NOT NULL,
    CounterID INTEGER NOT NULL,
    ClientIP INTEGER NOT NULL,
    RegionID INTEGER NOT NULL,
    UserID BIGINT NOT NULL,
    CounterClass INT NOT NULL,
    OS INT NOT NULL,
    UserAgent INT NOT NULL,
    URL TEXT NOT NULL,
    Referer TEXT NOT NULL,
    IsRefresh INT NOT NULL,
    RefererCategoryID INT NOT NULL,
    RefererRegionID INTEGER NOT NULL,
    URLCategoryID INT NOT NULL,
    URLRegionID INTEGER NOT NULL,
    ResolutionWidth INT NOT NULL,
    ResolutionHeight INT NOT NULL,
    ResolutionDepth INT NOT NULL,
    FlashMajor INT NOT NULL,
    FlashMinor INT NOT NULL,
    FlashMinor2 TEXT NOT NULL,
    NetMajor INT NOT NULL,
    NetMinor INT NOT NULL,
    UserAgentMajor INT NOT NULL,
    UserAgentMinor TEXT NOT NULL,
    CookieEnable INT NOT NULL,
    JavascriptEnable INT NOT NULL,
    IsMobile INT NOT NULL,
    MobilePhone INT NOT NULL,
    MobilePhoneModel TEXT NOT NULL,
    Params TEXT NOT NULL,
    IPNetworkID INTEGER NOT NULL,
    TraficSourceID INT NOT NULL,
    SearchEngineID INT NOT NULL,
    SearchPhrase TEXT NOT NULL,
    AdvEngineID INT NOT NULL,
    IsArtifical INT NOT NULL,
    WindowClientWidth INT NOT NULL,
    WindowClientHeight INT NOT NULL,
    ClientTimeZone INT NOT NULL,
    ClientEventTime TIMESTAMP NOT NULL,
    SilverlightVersion1 INT NOT NULL,
    SilverlightVersion2 INT NOT NULL,
    SilverlightVersion3 INTEGER NOT NULL,
    SilverlightVersion4 INT NOT NULL,
    PageCharset TEXT NOT NULL,
    CodeVersion INTEGER NOT NULL,
    IsLink INT NOT NULL,
    IsDownload INT NOT NULL,
    IsNotBounce INT NOT NULL,
    FUniqID BIGINT NOT NULL,
    OriginalURL TEXT NOT NULL,
    HID INTEGER NOT NULL,
    IsOldCounter INT NOT NULL,
    IsEvent INT NOT NULL,
    IsParameter INT NOT NULL,
    DontCountHits INT NOT NULL,
    WithHash INT NOT NULL,
    HitColor CHAR NOT NULL,
    LocalEventTime TIMESTAMP NOT NULL,
    Age INT NOT NULL,
    Sex INT NOT NULL,
    Income INT NOT NULL,
    Interests INT NOT NULL,
    Robotness INT NOT NULL,
    RemoteIP INTEGER NOT NULL,
    WindowName INTEGER NOT NULL,
    OpenerName INTEGER NOT NULL,
    HistoryLength INT NOT NULL,
    BrowserLanguage TEXT NOT NULL,
    BrowserCountry TEXT NOT NULL,
    SocialNetwork TEXT NOT NULL,
    SocialAction TEXT NOT NULL,
    HTTPError INT NOT NULL,
    SendTiming INTEGER NOT NULL,
    DNSTiming INTEGER NOT NULL,
    ConnectTiming INTEGER NOT NULL,
    ResponseStartTiming INTEGER NOT NULL,
    ResponseEndTiming INTEGER NOT NULL,
    FetchTiming INTEGER NOT NULL,
    SocialSourceNetworkID INT NOT NULL,
    SocialSourcePage TEXT NOT NULL,
    ParamPrice BIGINT NOT NULL,
    ParamOrderID TEXT NOT NULL,
    ParamCurrency TEXT NOT NULL,
    ParamCurrencyID INT NOT NULL,
    OpenstatServiceName TEXT NOT NULL,
    OpenstatCampaignID TEXT NOT NULL,
    OpenstatAdID TEXT NOT NULL,
    OpenstatSourceID TEXT NOT NULL,
    UTMSource TEXT NOT NULL,
    UTMMedium TEXT NOT NULL,
    UTMCampaign TEXT NOT NULL,
    UTMContent TEXT NOT NULL,
    UTMTerm TEXT NOT NULL,
    FromTag TEXT NOT NULL,
    HasGCLID INT NOT NULL,
    RefererHash BIGINT NOT NULL,
    URLHash BIGINT NOT NULL,
    CLID INTEGER NOT NULL
);
43 changes: 43 additions & 0 deletions oxla/queries.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
SELECT COUNT(*) FROM hits;
SELECT COUNT(*) FROM hits WHERE AdvEngineID <> 0;
SELECT SUM(AdvEngineID), COUNT(*), AVG(ResolutionWidth) FROM hits;
SELECT AVG(UserID) FROM hits;
SELECT COUNT(DISTINCT UserID) FROM hits;
SELECT COUNT(DISTINCT SearchPhrase) FROM hits;
SELECT MIN(EventDate), MAX(EventDate) FROM hits;
SELECT AdvEngineID, COUNT(*) FROM hits WHERE AdvEngineID <> 0 GROUP BY AdvEngineID ORDER BY COUNT(*) DESC;
SELECT RegionID, COUNT(DISTINCT UserID) AS u FROM hits GROUP BY RegionID ORDER BY u DESC LIMIT 10;
SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, AVG(ResolutionWidth), COUNT(DISTINCT UserID) FROM hits GROUP BY RegionID ORDER BY c DESC LIMIT 10;
SELECT MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhoneModel ORDER BY u DESC LIMIT 10;
SELECT MobilePhone, MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhone, MobilePhoneModel ORDER BY u DESC LIMIT 10;
SELECT SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10;
SELECT SearchPhrase, COUNT(DISTINCT UserID) AS u FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY u DESC LIMIT 10;
SELECT SearchEngineID, SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10;
SELECT UserID, COUNT(*) FROM hits GROUP BY UserID ORDER BY COUNT(*) DESC LIMIT 10;
SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10;
SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase LIMIT 10;
SELECT UserID, extract(minute FROM EventTime) AS m, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, m, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10;
SELECT UserID FROM hits WHERE UserID = 435090932899640449;
SELECT COUNT(*) FROM hits WHERE URL LIKE '%google%';
SELECT NULL;
SELECT NULL;
SELECT * FROM hits WHERE URL LIKE '%google%' ORDER BY EventTime LIMIT 10;
SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime LIMIT 10;
SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY SearchPhrase LIMIT 10;
SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime, SearchPhrase LIMIT 10;
SELECT CounterID, AVG(length(URL)) AS l, COUNT(*) AS c FROM hits WHERE URL <> '' GROUP BY CounterID HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25;
SELECT NULL;
SELECT SUM(ResolutionWidth), SUM(ResolutionWidth + 1), SUM(ResolutionWidth + 2), SUM(ResolutionWidth + 3), SUM(ResolutionWidth + 4), SUM(ResolutionWidth + 5), SUM(ResolutionWidth + 6), SUM(ResolutionWidth + 7), SUM(ResolutionWidth + 8), SUM(ResolutionWidth + 9), SUM(ResolutionWidth + 10), SUM(ResolutionWidth + 11), SUM(ResolutionWidth + 12), SUM(ResolutionWidth + 13), SUM(ResolutionWidth + 14), SUM(ResolutionWidth + 15), SUM(ResolutionWidth + 16), SUM(ResolutionWidth + 17), SUM(ResolutionWidth + 18), SUM(ResolutionWidth + 19), SUM(ResolutionWidth + 20), SUM(ResolutionWidth + 21), SUM(ResolutionWidth + 22), SUM(ResolutionWidth + 23), SUM(ResolutionWidth + 24), SUM(ResolutionWidth + 25), SUM(ResolutionWidth + 26), SUM(ResolutionWidth + 27), SUM(ResolutionWidth + 28), SUM(ResolutionWidth + 29), SUM(ResolutionWidth + 30), SUM(ResolutionWidth + 31), SUM(ResolutionWidth + 32), SUM(ResolutionWidth + 33), SUM(ResolutionWidth + 34), SUM(ResolutionWidth + 35), SUM(ResolutionWidth + 36), SUM(ResolutionWidth + 37), SUM(ResolutionWidth + 38), SUM(ResolutionWidth + 39), SUM(ResolutionWidth + 40), SUM(ResolutionWidth + 41), SUM(ResolutionWidth + 42), SUM(ResolutionWidth + 43), SUM(ResolutionWidth + 44), SUM(ResolutionWidth + 45), SUM(ResolutionWidth + 46), SUM(ResolutionWidth + 47), SUM(ResolutionWidth + 48), SUM(ResolutionWidth + 49), SUM(ResolutionWidth + 50), SUM(ResolutionWidth + 51), SUM(ResolutionWidth + 52), SUM(ResolutionWidth + 53), SUM(ResolutionWidth + 54), SUM(ResolutionWidth + 55), SUM(ResolutionWidth + 56), SUM(ResolutionWidth + 57), SUM(ResolutionWidth + 58), SUM(ResolutionWidth + 59), SUM(ResolutionWidth + 60), SUM(ResolutionWidth + 61), SUM(ResolutionWidth + 62), SUM(ResolutionWidth + 63), SUM(ResolutionWidth + 64), SUM(ResolutionWidth + 65), SUM(ResolutionWidth + 66), SUM(ResolutionWidth + 67), SUM(ResolutionWidth + 68), SUM(ResolutionWidth + 69), SUM(ResolutionWidth + 70), SUM(ResolutionWidth + 71), SUM(ResolutionWidth + 72), SUM(ResolutionWidth + 73), 
SUM(ResolutionWidth + 74), SUM(ResolutionWidth + 75), SUM(ResolutionWidth + 76), SUM(ResolutionWidth + 77), SUM(ResolutionWidth + 78), SUM(ResolutionWidth + 79), SUM(ResolutionWidth + 80), SUM(ResolutionWidth + 81), SUM(ResolutionWidth + 82), SUM(ResolutionWidth + 83), SUM(ResolutionWidth + 84), SUM(ResolutionWidth + 85), SUM(ResolutionWidth + 86), SUM(ResolutionWidth + 87), SUM(ResolutionWidth + 88), SUM(ResolutionWidth + 89) FROM hits;
SELECT SearchEngineID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, ClientIP ORDER BY c DESC LIMIT 10;
SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10;
SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10;
SELECT URL, COUNT(*) AS c FROM hits GROUP BY URL ORDER BY c DESC LIMIT 10;
SELECT 1, URL, COUNT(*) AS c FROM hits GROUP BY 1, URL ORDER BY c DESC LIMIT 10;
SELECT ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3, COUNT(*) AS c FROM hits GROUP BY ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3 ORDER BY c DESC LIMIT 10;
SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND URL <> '' GROUP BY URL ORDER BY PageViews DESC LIMIT 10;
SELECT Title, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND Title <> '' GROUP BY Title ORDER BY PageViews DESC LIMIT 10;
SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND IsLink <> 0 AND IsDownload = 0 GROUP BY URL ORDER BY PageViews DESC LIMIT 10 OFFSET 1000;
SELECT TraficSourceID, SearchEngineID, AdvEngineID, CASE WHEN (SearchEngineID = 0 AND AdvEngineID = 0) THEN Referer ELSE '' END AS Src, URL AS Dst, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst ORDER BY PageViews DESC LIMIT 10 OFFSET 1000;
SELECT URLHash, EventDate, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND TraficSourceID IN (-1, 6) AND RefererHash = 3594120000172545465 GROUP BY URLHash, EventDate ORDER BY PageViews DESC LIMIT 10 OFFSET 100;
SELECT WindowClientWidth, WindowClientHeight, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND DontCountHits = 0 AND URLHash = 2868770270353813622 GROUP BY WindowClientWidth, WindowClientHeight ORDER BY PageViews DESC LIMIT 10 OFFSET 10000;
SELECT NULL;
58 changes: 58 additions & 0 deletions oxla/results/c6a.4xlarge.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
{
"system": "Oxla.com",
"date": "2024-01-31",
"machine": "c6a.4xlarge, 500gb gp2",
"cluster_size": 1,
"comment": "ingests data only from non-compressed csv. data should be ingested in chunks < ~5Gb",

"tags": ["C", "analytical", "somewhat PostgreSQL compatible"],

"load_time": 584.88,
"data_size": 83948840996,

"result": [
[0.050129,0.022855,0.022903],
[0.057503,0.015481,0.015516],
[0.094277,0.025541,0.026299],
[0.16258,0.024306,0.023829],
[0.039942,0.021789,0.02218],
[0.1374,0.101147,0.099119],
[0.059388,0.025719,0.0264],
[0.033712,0.017465,0.0184],
[0.135859,0.096702,0.095512],
[0.171016,0.155303,0.154772],
[0.190669,0.182083,0.183751],
[0.22111,0.18886,0.193252],
[2.52142,2.55214,2.54007],
[2.6807,2.5843,2.63534],
[2.75055,2.68715,2.67322],
[1.52986,1.47056,1.49706],
[8.22161,7.84746,7.57008],
[7.06027,7.33078,6.88351],
[10.5416,10.2955,10.2149],
[0.053765,0.02466,0.023807],
[2.67538,2.54948,2.71025],
[null,null,null],
[null,null,null],
[8.75207,8.09075,8.26788],
[0.359349,0.31487,0.312975],
[0.331405,0.339576,0.332647],
[0.351841,0.340408,0.325682],
[1.62115,1.67895,1.74872],
[null,null,null],
[1.58662,1.58356,1.58278],
[0.632047,0.553994,0.586148],
[1.15181,1.14935,1.10892],
[13.1952,13.0677,12.6957],
[15.9073,13.0187,12.6488],
[12.3274,13.7521,14.7386],
[1.27877,1.35705,1.24621],
[1.011,0.955655,0.967898],
[3.46346,0.841062,0.878294],
[0.867865,0.885799,0.812358],
[4.55556,1.79754,1.75293],
[4.00301,0.064817,0.062893],
[0.163954,0.063076,0.060033],
[null,null,null]
]
}
13 changes: 13 additions & 0 deletions oxla/run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/bash

# Run every benchmark query from queries.sql TRIES times against the local
# Oxla instance (PostgreSQL wire protocol) and print psql's '\timing' lines.
# Output is consumed by benchmark.sh, which extracts the 'Time: ... ms'
# entries and groups them three per query.
#
# NOTE: queries.sql must contain exactly one query per line — this loop
# treats each line as a complete statement.

TRIES=3

# `IFS= read -r` (instead of bare `read`) preserves leading/trailing
# whitespace and keeps backslashes in queries literal; redirecting the
# file into the loop avoids the useless `cat |`.
while IFS= read -r query; do
    # flush dirty pages and drop the OS page cache so each query starts cold
    sync
    echo 3 | sudo tee /proc/sys/vm/drop_caches

    echo "$query";
    for i in $(seq 1 $TRIES); do
        sudo psql -h localhost -p 5432 -U postgres -d test -t -c '\timing' -c "$query" | grep 'Time'
    done;
done < queries.sql

0 comments on commit a7437ed

Please sign in to comment.