Skip to content

Commit

Permalink
Merge branch 'main' into cmakebuild
Browse files Browse the repository at this point in the history
  • Loading branch information
alexv-smirnov committed Jan 23, 2025
2 parents 1bce7e1 + 78fee80 commit 5ff8979
Show file tree
Hide file tree
Showing 15 changed files with 150 additions and 17 deletions.
47 changes: 39 additions & 8 deletions .github/workflows/label_external_issues.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,47 @@ jobs:
label-external-issues:
name: Label issue from external user
runs-on: ubuntu-latest
# https://docs.github.com/en/graphql/reference/enums#commentauthorassociation
if: ${{ !contains(fromJson('["MEMBER", "OWNER", "COLLABORATOR"]'), github.event.issue.author_association) }}
steps:
- name: add external label
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
script: |
github.rest.issues.addLabels({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
labels: ['external']
})
const issueAuthor = context.payload.issue.user.login
if (context.repo.owner == issueAuthor) {
console.log("Issue author is here");
return
}
const org = context.repo.owner;
const isOrgMember = async function () {
try {
const response = await github.rest.orgs.checkMembershipForUser({
org,
username: issueAuthor,
});
return response.status == 204;
} catch (error) {
if (error.status && error.status == 404) {
return false;
}
throw error;
}
}
console.log(`Checking membership for user: ${issueAuthor} in organization: ${org}`);
if (!await isOrgMember()) {
console.log(`User ${issueAuthor} is not a member of the organization.`)
github.rest.issues.addLabels({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
labels: ['external']
})
} else {
console.log(`User ${issueAuthor} is a member of the organization.`)
}
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q1.result
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
l_returnflag,l_linestatus,sum_qty,sum_base_price,sum_disc_price,sum_charge,avg_qty,avg_price,avg_disc,count_order
A,F,3.774713243e+11+-100,5.660221407e+14+-1000,5.377203891e+14+-1000,5.592291037e+14+-1000,25.5+-0.01,38237.28+-0.01,0.05+-0.01,14802885380
A,F,3.775711377e+11,5.66171807e+14,5.378625747e+14,5.59376974e+14,25.5+-0.01,38237.28+-0.01,0.05+-0.01,14802885380
...
3 changes: 3 additions & 0 deletions ydb/library/workload/tpch/s10000_canonical/q10.result
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
c_custkey,c_name,revenue,c_acctbal,n_name,c_address,c_phone,c_comment
883656898,Customer#883656898,4676163.4255,2487.44,MOROCCO,"6Temru GsGPkOQ",25-465-335-3565,"ites across the blithely regular pinto beans cajole quickly carefully ironic pinto beans. furiousdep"
...
4 changes: 3 additions & 1 deletion ydb/library/workload/tpch/s10000_canonical/q11.result
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
ps_partkey,value
ps_partkey,value
333984873,30196540.
...
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q12.result
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
l_shipmode,high_line_count,low_line_count
MAIL,62299162,93460023
MAIL,62315515,93484567
...
3 changes: 3 additions & 0 deletions ydb/library/workload/tpch/s10000_canonical/q16.result
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
p_brand,p_type,p_size,supplier_cnt
Brand#52,"MEDIUM BRUSHED NICKEL",14,43072
...
2 changes: 2 additions & 0 deletions ydb/library/workload/tpch/s10000_canonical/q19.result
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
revenue
29908721391.699352
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q3.result
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
l_orderkey,revenue,o_orderdate,o_shippriority
43904139302,510414.04+-0.01,1995-02-13,0
35314204736 ,510414.04,1995-02-13,0
...
3 changes: 3 additions & 0 deletions ydb/library/workload/tpch/s10000_canonical/q4.result
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
o_orderpriority,order_count
1-URGENT,105191109
...
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q5.result
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
n_name,revenue
"INDIA",5.29304173e+11
"INDIA",5.29441751e+11
...
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q6.result
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
revenue
1.232837859e+12
1.233162481e+12
2 changes: 1 addition & 1 deletion ydb/library/workload/tpch/s10000_canonical/q7.result
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
supp_nation,cust_nation,l_year,revenue
FRANCE,GERMANY,1995,5.284883674e+11
FRANCE,GERMANY,1995,5.286286888e+11
...
88 changes: 88 additions & 0 deletions ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
import os
import signal
import sys
import time

from .base import TllTieringTestBase

# Rows written by a single UPSERT chunk.
ROWS_CHUNK_SIZE = 1000000
# Total chunks written by the test; the first half is upserted before the
# simulated S3 outage and the second half after recovery.
ROWS_CHUNKS_COUNT = 10


class TestUnavailableS3(TllTieringTestBase):
    """Tiering must keep accepting writes across a temporary S3 outage.

    Reproduces https://github.com/ydb-platform/ydb/issues/13545: the S3
    process is frozen with SIGSTOP while upserts continue, then resumed
    with SIGCONT.
    """

    def test(self):
        """As per https://github.com/ydb-platform/ydb/issues/13545"""
        bucket_s3_name = "cold"
        bucket_db_path = f"{self.ydb_client.database}/buckets/{bucket_s3_name}"

        # Column-store table whose rows the TTL rule below will evict to S3.
        self.ydb_client.query("""
            CREATE TABLE table (
                ts Timestamp NOT NULL,
                v String,
                PRIMARY KEY(ts),
            )
            WITH (STORE = COLUMN)
        """)

        self.s3_client.create_bucket(bucket_s3_name)

        # Bucket credentials are exposed to YDB as SECRET objects.
        self.ydb_client.query(f"CREATE OBJECT s3_id (TYPE SECRET) WITH value = '{self.s3_client.key_id}'")
        self.ydb_client.query(f"CREATE OBJECT s3_secret (TYPE SECRET) WITH value = '{self.s3_client.key_secret}'")

        self.ydb_client.query(f"""
            CREATE EXTERNAL DATA SOURCE `{bucket_db_path}` WITH (
                SOURCE_TYPE="ObjectStorage",
                LOCATION="{self.s3_client.endpoint}/{bucket_s3_name}",
                AUTH_METHOD="AWS",
                AWS_ACCESS_KEY_ID_SECRET_NAME="s3_id",
                AWS_SECRET_ACCESS_KEY_SECRET_NAME="s3_secret",
                AWS_REGION="{self.s3_client.region}"
            )
        """)

        # table = ColumnTableHelper(self.ydb_client, 'table')

        def upsert_chunk(chunk_index):
            # Writes ROWS_CHUNK_SIZE rows whose timestamps are spread over
            # 2020-2030; the chunk index shifts the base timestamp.
            return self.ydb_client.query(f"""
                $n = {ROWS_CHUNK_SIZE};
                $beg_ul = CAST(Timestamp('2020-01-01T00:00:00.000000Z') as Uint64);
                $end_ul = CAST(Timestamp('2030-01-01T00:00:00.000000Z') as Uint64);
                $int_ul = $end_ul - $beg_ul;
                $step_ul = 100000;
                $rows_list = ListMap(ListFromRange(0, $n), ($j) -> (<|
                    ts: UNWRAP(CAST($beg_ul + $step_ul * {chunk_index}ul + CAST(Random($j) * $int_ul AS Uint64) AS Timestamp)),
                    v: "Entry #" || CAST($j AS String)
                |>));
                UPSERT INTO table
                SELECT * FROM AS_TABLE($rows_list);
            """)

        first_half = ROWS_CHUNKS_COUNT // 2
        for chunk_no in range(first_half):
            upsert_chunk(chunk_no)

        # Route rows older than a year to the external bucket.
        self.ydb_client.query(f"""
            ALTER TABLE table SET (TTL =
                Interval("P365D") TO EXTERNAL DATA SOURCE `{bucket_db_path}`
                ON ts
            )
        """)

        print("!!! simulating S3 hang up -- sending SIGSTOP", file=sys.stderr)
        os.kill(self.s3_pid, signal.SIGSTOP)

        time.sleep(30)

        print("!!! simulating S3 recovery -- sending SIGCONT", file=sys.stderr)
        os.kill(self.s3_pid, signal.SIGCONT)

        def get_stat():
            return self.s3_client.get_bucket_stat(bucket_s3_name)[0]

        # stat_old = get_stat()

        # The remaining chunks go in while/after the tier recovers.
        for chunk_no in range(first_half, ROWS_CHUNKS_COUNT):
            upsert_chunk(chunk_no)

        # Uncomment after fixing https://github.com/ydb-platform/ydb/issues/13719
        # assert self.wait_for(lambda: get_stat() != stat_old, 120), "data distribution continuation"
1 change: 1 addition & 0 deletions ydb/tests/olap/ttl_tiering/ya.make
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ ENV(YDB_ADDITIONAL_LOG_CONFIGS="TX_TIERING:DEBUG")
TEST_SRCS(
base.py
ttl_delete_s3.py
ttl_unavailable_s3.py
data_migration_when_alter_ttl.py
)

Expand Down
4 changes: 2 additions & 2 deletions ydb/tests/sql/ya.make
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ TEST_SRCS(
test_inserts.py
)

SIZE(SMALL)
SIZE(MEDIUM)

DEPENDS(
ydb/apps/ydb
Expand All @@ -25,4 +25,4 @@ END()
RECURSE(
lib
large
)
)

0 comments on commit 5ff8979

Please sign in to comment.