diff --git a/.github/workflows/label_external_issues.yml b/.github/workflows/label_external_issues.yml
index 582c4e2c582c..0d60112bec75 100644
--- a/.github/workflows/label_external_issues.yml
+++ b/.github/workflows/label_external_issues.yml
@@ -7,16 +7,47 @@ jobs:
   label-external-issues:
     name: Label issue from external user
     runs-on: ubuntu-latest
-    # https://docs.github.com/en/graphql/reference/enums#commentauthorassociation
-    if: ${{ !contains(fromJson('["MEMBER", "OWNER", "COLLABORATOR"]'), github.event.issue.author_association) }}
     steps:
       - name: add external label
         uses: actions/github-script@v7
         with:
+          github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
           script: |
-            github.rest.issues.addLabels({
-              issue_number: context.issue.number,
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              labels: ['external']
-            })
+            const issueAuthor = context.payload.issue.user.login
+
+            if (context.repo.owner == issueAuthor) {
+              console.log("Issue author is here");
+              return
+            }
+
+            const org = context.repo.owner;
+
+            const isOrgMember = async function () {
+              try {
+                const response = await github.rest.orgs.checkMembershipForUser({
+                  org,
+                  username: issueAuthor,
+                });
+                return response.status == 204;
+              } catch (error) {
+                if (error.status && error.status == 404) {
+                  return false;
+                }
+                throw error;
+              }
+            }
+
+            console.log(`Checking membership for user: ${issueAuthor} in organization: ${org}`);
+
+            if (!await isOrgMember()) {
+              console.log(`User ${issueAuthor} is not a member of the organization.`)
+
+              github.rest.issues.addLabels({
+                issue_number: context.issue.number,
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                labels: ['external']
+              })
+            } else {
+              console.log(`User ${issueAuthor} is a member of the organization.`)
+            }
diff --git a/ydb/library/workload/tpch/s10000_canonical/q1.result b/ydb/library/workload/tpch/s10000_canonical/q1.result
index c5043a56123d..5d67d2029a36 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q1.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q1.result
@@ -1,3 +1,3 @@
 l_returnflag,l_linestatus,sum_qty,sum_base_price,sum_disc_price,sum_charge,avg_qty,avg_price,avg_disc,count_order
-A,F,3.774713243e+11+-100,5.660221407e+14+-1000,5.377203891e+14+-1000,5.592291037e+14+-1000,25.5+-0.01,38237.28+-0.01,0.05+-0.01,14802885380
+A,F,3.775711377e+11,5.66171807e+14,5.378625747e+14,5.59376974e+14,25.5+-0.01,38237.28+-0.01,0.05+-0.01,14802885380
 ...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q10.result b/ydb/library/workload/tpch/s10000_canonical/q10.result
new file mode 100644
index 000000000000..c96c8c2c4ca4
--- /dev/null
+++ b/ydb/library/workload/tpch/s10000_canonical/q10.result
@@ -0,0 +1,3 @@
+c_custkey,c_name,revenue,c_acctbal,n_name,c_address,c_phone,c_comment
+883656898,Customer#883656898,4676163.4255,2487.44,MOROCCO,"6Temru GsGPkOQ",25-465-335-3565,"ites across the blithely regular pinto beans cajole quickly carefully ironic pinto beans. furiousdep"
+...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q11.result b/ydb/library/workload/tpch/s10000_canonical/q11.result
index 48c29edad64f..9be19376550b 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q11.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q11.result
@@ -1 +1,3 @@
-ps_partkey,value
\ No newline at end of file
+ps_partkey,value
+333984873,30196540.
+...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q12.result b/ydb/library/workload/tpch/s10000_canonical/q12.result
index 7d9ef3fc4e2d..c4f4142597d1 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q12.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q12.result
@@ -1,3 +1,3 @@
 l_shipmode,high_line_count,low_line_count
-MAIL,62299162,93460023
+MAIL,62315515,93484567
 ...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q16.result b/ydb/library/workload/tpch/s10000_canonical/q16.result
new file mode 100644
index 000000000000..c81ca62f308d
--- /dev/null
+++ b/ydb/library/workload/tpch/s10000_canonical/q16.result
@@ -0,0 +1,3 @@
+p_brand,p_type,p_size,supplier_cnt
+Brand#52,"MEDIUM BRUSHED NICKEL",14,43072
+...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q19.result b/ydb/library/workload/tpch/s10000_canonical/q19.result
new file mode 100644
index 000000000000..72af4e50c3cb
--- /dev/null
+++ b/ydb/library/workload/tpch/s10000_canonical/q19.result
@@ -0,0 +1,2 @@
+revenue
+29908721391.699352
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q3.result b/ydb/library/workload/tpch/s10000_canonical/q3.result
index b7c05b0b937e..79d1b6212181 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q3.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q3.result
@@ -1,3 +1,3 @@
 l_orderkey,revenue,o_orderdate,o_shippriority
-43904139302,510414.04+-0.01,1995-02-13,0
+35314204736 ,510414.04,1995-02-13,0
 ...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q4.result b/ydb/library/workload/tpch/s10000_canonical/q4.result
new file mode 100644
index 000000000000..9c3395d8cb91
--- /dev/null
+++ b/ydb/library/workload/tpch/s10000_canonical/q4.result
@@ -0,0 +1,3 @@
+o_orderpriority,order_count
+1-URGENT,105191109
+...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q5.result b/ydb/library/workload/tpch/s10000_canonical/q5.result
index f8faddfb47a5..0767372104d0 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q5.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q5.result
@@ -1,3 +1,3 @@
 n_name,revenue
-"INDIA",5.29304173e+11
+"INDIA",5.29441751e+11
 ...
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q6.result b/ydb/library/workload/tpch/s10000_canonical/q6.result
index 088a76a28a92..266ff2f9c6ce 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q6.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q6.result
@@ -1,2 +1,2 @@
 revenue
-1.232837859e+12
\ No newline at end of file
+1.233162481e+12
\ No newline at end of file
diff --git a/ydb/library/workload/tpch/s10000_canonical/q7.result b/ydb/library/workload/tpch/s10000_canonical/q7.result
index 036f45326c6e..bb826c8dcae9 100644
--- a/ydb/library/workload/tpch/s10000_canonical/q7.result
+++ b/ydb/library/workload/tpch/s10000_canonical/q7.result
@@ -1,3 +1,3 @@
 supp_nation,cust_nation,l_year,revenue
-FRANCE,GERMANY,1995,5.284883674e+11
+FRANCE,GERMANY,1995,5.286286888e+11
 ...
\ No newline at end of file
diff --git a/ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py b/ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py
new file mode 100644
index 000000000000..db813d282a3e
--- /dev/null
+++ b/ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py
@@ -0,0 +1,88 @@
+import os
+import signal
+import sys
+import time
+
+from .base import TllTieringTestBase
+
+ROWS_CHUNK_SIZE = 1000000
+ROWS_CHUNKS_COUNT = 10
+
+
+class TestUnavailableS3(TllTieringTestBase):
+    def test(self):
+        """As per https://github.com/ydb-platform/ydb/issues/13545"""
+        bucket_s3_name = "cold"
+        bucket_db_path = f"{self.ydb_client.database}/buckets/{bucket_s3_name}"
+
+        self.ydb_client.query("""
+            CREATE TABLE table (
+                ts Timestamp NOT NULL,
+                v String,
+                PRIMARY KEY(ts),
+            )
+            WITH (STORE = COLUMN)
+        """)
+
+        self.s3_client.create_bucket(bucket_s3_name)
+
+        self.ydb_client.query(f"CREATE OBJECT s3_id (TYPE SECRET) WITH value = '{self.s3_client.key_id}'")
+        self.ydb_client.query(f"CREATE OBJECT s3_secret (TYPE SECRET) WITH value = '{self.s3_client.key_secret}'")
+
+        self.ydb_client.query(f"""
+            CREATE EXTERNAL DATA SOURCE `{bucket_db_path}` WITH (
+                SOURCE_TYPE="ObjectStorage",
+                LOCATION="{self.s3_client.endpoint}/{bucket_s3_name}",
+                AUTH_METHOD="AWS",
+                AWS_ACCESS_KEY_ID_SECRET_NAME="s3_id",
+                AWS_SECRET_ACCESS_KEY_SECRET_NAME="s3_secret",
+                AWS_REGION="{self.s3_client.region}"
+            )
+        """)
+
+        # table = ColumnTableHelper(self.ydb_client, 'table')
+
+        def upsert_chunk(i):
+            return self.ydb_client.query(f"""
+                $n = {ROWS_CHUNK_SIZE};
+                $beg_ul = CAST(Timestamp('2020-01-01T00:00:00.000000Z') as Uint64);
+                $end_ul = CAST(Timestamp('2030-01-01T00:00:00.000000Z') as Uint64);
+                $int_ul = $end_ul - $beg_ul;
+                $step_ul = 100000;
+                $rows_list = ListMap(ListFromRange(0, $n), ($j) -> (<|
+                    ts: UNWRAP(CAST($beg_ul + $step_ul * {i}ul + CAST(Random($j) * $int_ul AS Uint64) AS Timestamp)),
+                    v: "Entry #" || CAST($j AS String)
+                |>));
+
+                UPSERT INTO table
+                SELECT * FROM AS_TABLE($rows_list);
+            """)
+
+        for i in range(0, ROWS_CHUNKS_COUNT // 2):
+            upsert_chunk(i)
+
+        self.ydb_client.query(f"""
+            ALTER TABLE table SET (TTL =
+                Interval("P365D") TO EXTERNAL DATA SOURCE `{bucket_db_path}`
+                ON ts
+            )
+        """)
+
+        print("!!! simulating S3 hang up -- sending SIGSTOP", file=sys.stderr)
+        os.kill(self.s3_pid, signal.SIGSTOP)
+
+        time.sleep(30)
+
+        print("!!! simulating S3 recovery -- sending SIGCONT", file=sys.stderr)
+        os.kill(self.s3_pid, signal.SIGCONT)
+
+        def get_stat():
+            return self.s3_client.get_bucket_stat(bucket_s3_name)[0]
+
+        # stat_old = get_stat()
+
+        for i in range(ROWS_CHUNKS_COUNT // 2, ROWS_CHUNKS_COUNT):
+            upsert_chunk(i)
+
+        # Uncomment after fixing https://github.com/ydb-platform/ydb/issues/13719
+        # assert self.wait_for(lambda: get_stat() != stat_old, 120), "data distribution continuation"
diff --git a/ydb/tests/olap/ttl_tiering/ya.make b/ydb/tests/olap/ttl_tiering/ya.make
index 04a506a3f68e..2caf761764ff 100644
--- a/ydb/tests/olap/ttl_tiering/ya.make
+++ b/ydb/tests/olap/ttl_tiering/ya.make
@@ -6,6 +6,7 @@ ENV(YDB_ADDITIONAL_LOG_CONFIGS="TX_TIERING:DEBUG")
 TEST_SRCS(
     base.py
     ttl_delete_s3.py
+    ttl_unavailable_s3.py
     data_migration_when_alter_ttl.py
 )
 
diff --git a/ydb/tests/sql/ya.make b/ydb/tests/sql/ya.make
index 75c120610266..0814437a954a 100644
--- a/ydb/tests/sql/ya.make
+++ b/ydb/tests/sql/ya.make
@@ -7,7 +7,7 @@ TEST_SRCS(
     test_inserts.py
 )
 
-SIZE(SMALL)
+SIZE(MEDIUM)
 
 DEPENDS(
     ydb/apps/ydb
@@ -25,4 +25,4 @@ END()
 RECURSE(
     lib
     large
-)
\ No newline at end of file
+)