diff --git a/clickhouseapi.js b/clickhouseapi.js index f58193d2277..2edbe484623 100644 --- a/clickhouseapi.js +++ b/clickhouseapi.js @@ -85,55 +85,60 @@ function generateDocusaurusMarkdown(spec, groupedEndpoints, prefix) { } if (operation.responses) { - markdownContent += `\n### Response\n\n`; - - markdownContent += `#### Response Schema\n\n`; - const rawSchema = operation.responses['200'].content["application/json"].schema const result = rawSchema.properties.result - const schema = rawSchema.properties.result.type === 'array' ? - result.items['$ref'].split('/').pop() : result['$ref'].split('/').pop() - - const bodyParamAttrs = spec.components.schemas[schema].properties - const bodyParams = Object.keys(bodyParamAttrs) - const sampleResponseObj = {} - markdownContent += `| Name | Type | Description |\n` - markdownContent += `| :--- | :--- | :---------- |\n` - - for (const parameter of bodyParams) { - const paramType = bodyParamAttrs[parameter].format || bodyParamAttrs[parameter].type - markdownContent += `| ${parameter} | ${paramType || ''} | ${bodyParamAttrs[parameter].description || ''} | \n` - - switch (paramType) { - case 'uuid': - sampleResponseObj[parameter] = 'uuid'; - break; - case 'string': - sampleResponseObj[parameter] = 'string'; - break; - case 'number': - sampleResponseObj[parameter] = 0; - break; - case 'array': - sampleResponseObj[parameter] = 'Array'; - break; - case 'boolean': - sampleResponseObj[parameter] = 'boolean'; - break; - case 'date-time': - sampleResponseObj[parameter] = 'date-time'; - break; - case 'email': - sampleResponseObj[parameter] = 'email'; - break; + if (result) { + markdownContent += `\n### Response\n\n`; + + markdownContent += `#### Response Schema\n\n`; + + const schema = rawSchema.properties.result.type === 'array' ? 
+ result.items['$ref'].split('/').pop() : result['$ref'].split('/').pop() + + const bodyParamAttrs = spec.components.schemas[schema].properties + const bodyParams = Object.keys(bodyParamAttrs) + const sampleResponseObj = {} + + markdownContent += `| Name | Type | Description |\n` + markdownContent += `| :--- | :--- | :---------- |\n` + + for (const parameter of bodyParams) { + const paramType = bodyParamAttrs[parameter].format || bodyParamAttrs[parameter].type + markdownContent += `| ${parameter} | ${paramType || ''} | ${bodyParamAttrs[parameter].description || ''} | \n` + + switch (paramType) { + case 'uuid': + sampleResponseObj[parameter] = 'uuid'; + break; + case 'string': + sampleResponseObj[parameter] = 'string'; + break; + case 'number': + sampleResponseObj[parameter] = 0; + break; + case 'array': + sampleResponseObj[parameter] = 'Array'; + break; + case 'boolean': + sampleResponseObj[parameter] = 'boolean'; + break; + case 'date-time': + sampleResponseObj[parameter] = 'date-time'; + break; + case 'email': + sampleResponseObj[parameter] = 'email'; + break; + } } + + markdownContent += `\n#### Sample response\n\n`; + markdownContent += '```\n' + markdownContent += `${JSON.stringify(sampleResponseObj, 0, 2)}` + markdownContent += '\n```\n' } - markdownContent += `\n#### Sample response\n\n`; - markdownContent += '```\n' - markdownContent += `${JSON.stringify(sampleResponseObj, 0, 2)}` - markdownContent += '\n```\n' + } } } diff --git a/docs/en/about-us/adopters.md b/docs/en/about-us/adopters.md index 62aec8f3e4a..092ccf8e247 100644 --- a/docs/en/about-us/adopters.md +++ b/docs/en/about-us/adopters.md @@ -12,6 +12,7 @@ The following list of companies using ClickHouse and their success stories is as | Company | Industry | Usecase | Cluster Size | (Un)Compressed Data Size\* | Reference | |---------|----------|---------|--------------|------------------------------------------------------------------------------|-----------| +| [1Flow](https://1flow.ai/) | Feedback 
automation | - | — | — | ClickHouse Cloud user | | [2gis](https://2gis.ru) | Maps | Monitoring | — | — | [Talk in Russian, July 2019](https://youtu.be/58sPkXfq6nw) | | [3xpl](https://3xpl.com/) | Software & Technology | Blockchain Explorer | — | — | [Reddit, February 2023](https://www.reddit.com/r/ethereum/comments/1159pdg/new_ethereum_explorer_by_3xpl_no_ads_super_fast/) | | [5CNetwork](https://www.5cnetwork.com/) | Software | Analytics | — | — | [Community Slack](https://clickhouse.com/slack) | @@ -41,6 +42,7 @@ The following list of companies using ClickHouse and their success stories is as | [ASO.dev](https://aso.dev/) | Software & Technology | App store optimisation | — | — | [Twitter, April 2023](https://twitter.com/gorniv/status/1642847791226445828) | | [Atani](https://atani.com/en/) | Software & Technology | Crypto Platform | — | — | [CTO LinkedIn](https://www.linkedin.com/in/fbadiola/) | | [Autoblocks](https://autoblocks.ai) | Software & Technology | LLM Monitoring & Deployment | — | — | [Twitter, August 2023](https://twitter.com/nolte_adam/status/1690722237953794048) | +| [Aviso](https://www.aviso.com/) | AI Platform | Reporting | — | — | ClickHouse Cloud user | | [Avito](https://avito.ru/) | Classifieds | Monitoring | — | — | [Meetup, April 2020](https://www.youtube.com/watch?v=n1tm4j4W8ZQ) | | [AzurePrice](https://azureprice.net/) | Analytics | Main Product | — | — | [Blog, November 2022](https://blog.devgenius.io/how-i-migrate-to-clickhouse-and-speedup-my-backend-7x-and-decrease-cost-by-6x-part-1-2553251a9059) | | [B2Metric](https://b2metric.com/) | Marketing | Analytics | — | — | [ProductHunt, July 2023](https://www.producthunt.com/posts/b2metric-decision-intelligence?bc=1) | @@ -76,19 +78,24 @@ The following list of companies using ClickHouse and their success stories is as | [Cisco](http://cisco.com/) | Networking | Traffic analysis | — | — | [Lightning talk, October 2019](https://youtu.be/-hI1vDR2oPY?t=5057) | | [Citadel 
Securities](https://www.citadelsecurities.com/) | Finance | — | — | — | [Contribution, March 2019](https://github.com/ClickHouse/ClickHouse/pull/4774) | | [Citymobil](https://city-mobil.ru) | Taxi | Analytics | — | — | [Blog Post in Russian, March 2020](https://habr.com/en/company/citymobil/blog/490660/) | +| [Clearbit](https://clearbit.com/) | AI | Product usage | — | — | ClickHouse Cloud user | +| [ClickFunnels](https://www.clickfunnels.com/) | Website Builder | | — | — | ClickHouse Cloud user | | [ClickVisual](https://clickvisual.gocn.vip/) | Software | Logging Platform | — | — | [Blog Post, May 2022](https://golangexample.com/a-light-weight-log-visual-analytic-platform-for-clickhouse/) | | [Clog](https://www.hybridlogic.co.uk/) | Software & Technology | Logging | — | — | [Blog, February 2023](https://www.hybridlogic.co.uk/2023/02/clog/) | | [Cloudflare](https://cloudflare.com) | CDN | Traffic analysis | 36 servers | — | [Blog post, May 2017](https://blog.cloudflare.com/how-cloudflare-analyzes-1m-dns-queries-per-second/), [Blog post, March 2018](https://blog.cloudflare.com/http-analytics-for-6m-requests-per-second-using-clickhouse/) | | [Coinpaprika](https://coinpaprika.com/) | Software & Technology | Cryptocurrency Market Data Analysis | — | — | [Blog, May 2023](https://clickhouse.com/blog/coinpaprika-aggregates-pricing-data) | | [Comcast](https://corporate.comcast.com/) | Media | CDN Traffic Analysis | — | — | [ApacheCon 2019 Talk](https://www.youtube.com/watch?v=e9TZ6gFDjNg) | +| [Constructor](https://constructor.io/) | E-commerce Search | E-commerce Search | — | — | ClickHouse Cloud user | | [Contentsquare](https://contentsquare.com) | Web analytics | Main product | — | — | [Meetup Video, January 2023](https://www.youtube.com/watch?v=zvuCBAl2T0Q&list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U&index=5) [Blog Post, October 2022](https://clickhouse.com/blog/contentsquare-migration-from-elasticsearch-to-clickhouse) [Blog post in French, November 
2018](http://souslecapot.net/2018/11/21/patrick-chatain-vp-engineering-chez-contentsquare-penser-davantage-amelioration-continue-que-revolution-constante/) | | [Coroot](https://coroot.com/) | Software & Technology | Observability | — | — | [Tweet, July 2023](https://twitter.com/coroot_com/status/1680993372385804288?s=20) | | [Corunet](https://coru.net/) | Analytics | Main product | — | — | [Slides in English, April 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup21/predictive_models.pdf) | +| [Covalent](https://www.covalenthq.com/) | Financial - Crypto | Blockchain analysis | — | — | ClickHouse Cloud user | | [CraiditX 氪信](https://www.creditx.com) | Finance AI | Analysis | — | — | [Slides in English, November 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup33/udf.pptx) | | [Crazypanda](https://crazypanda.ru/en/) | Games | | — | — | Live session on ClickHouse meetup | | [Criteo](https://www.criteo.com/) | Retail | Main product | — | — | [Slides in English, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup18/3_storetail.pptx) | | [Cryptology](https://cryptology.com/) | Digital Assets Trading Platform | — | — | — | [Job advertisement, March 2021](https://career.habr.com/companies/cryptology/vacancies) | | [Cumul.io](https://www.cumul.io) | Software & Technology | Customer Analytics | — | — | [Blog Post, June 2022](https://clickhouse.com/blog/optimizing-your-customer-facing-analytics-experience-with-cumul-io-and-clickhouse) | +| [Culver Max Entertainment/Sony Pictures](https://www.sonypicturesnetworks.com/overview) | Television/Entertainment | Media streaming analytics | — | — | ClickHouse Cloud user | | [Darwinium](https://www.darwinium.com/) | Software & Technology | Security and Fraud Analytics | — | — | [Blog Post, July 2022](https://clickhouse.com/blog/fast-feature-rich-and-mutable-clickhouse-powers-darwiniums-security-and-fraud-analytics-use-cases) | | 
[Dassana](https://lake.dassana.io/) | Cloud data platform | Main product | - | - | [Blog Post, Jan 2023](https://clickhouse.com/blog/clickhouse-powers-dassanas-security-data-lake) [Direct reference, April 2022](https://news.ycombinator.com/item?id=31111432) | | [Datafold](https://www.datafold.com/) | Data Reliability Platform | — | — | — | [Job advertisement, April 2022](https://www.datafold.com/careers) | @@ -117,6 +124,7 @@ The following list of companies using ClickHouse and their success stories is as | [FastNetMon](https://fastnetmon.com/) | DDoS Protection | Main Product | | — | [Official website](https://fastnetmon.com/docs-fnm-advanced/fastnetmon-advanced-traffic-persistency/) | | [FeatBit](https://www.featbit.co/) | Software & Technology | Feature Flag Management | — | — | [GitHub, August 2023](https://github.com/featbit/featbit) | | [FinBox](https://finbox.in/)| Software & Technology | Financial Services | — | — | [Slack](https://clickhousedb.slack.com/archives/C04N3AU38DV/p1688198501884219) | +| [Fingerprint](https://fingerprint.com/) | Fraud detection | Fraud detection | — | — | [Meetup](https://www.linkedin.com/posts/system29a_clickhouse-meetup-in-berlin-tue-may-16-activity-7063805876570050561-UE-n/) | | [Firebolt](https://www.firebolt.io/) | Analytics | Main product | - | - | [VLDB 2022 paper](https://www.firebolt.io/content/firebolt-vldb-cdms-2022), [VLDB 2022 slides](https://cdmsworkshop.github.io/2022/Slides/Fri_C2.5_MoshaPasumansky.pdf) | | [Flipkart](https://www.flipkart.com/) | e-Commerce | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=239) | | [Fortis Games](https://fortisgames.com/) | Game studio | Online data analytics | - | — | [Blog post, July 2023](https://thenewstack.io/a-real-time-data-platform-for-player-driven-game-experiences/) | @@ -180,8 +188,10 @@ The following list of companies using ClickHouse and their success stories is as | [Klaviyo](https://www.klaviyo.com/) | E-Commerce Marketing Automation 
Platform| — | 128 nodes | — | [Klaviyo Engineering Blog, Jan 2023](https://klaviyo.tech/adaptive-concurrency-control-for-mixed-analytical-workloads-51350439aeec) , [Klaviyo Engineering Blog, July 2023](https://klaviyo.tech/taking-the-first-sip-an-overview-of-klaviyos-segmentation-improvement-project-7db997f36b39) | | [Kodiak Data](https://www.kodiakdata.com/) | Clouds | Main product | — | — | [Slides in Engish, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) | | [Kontur](https://kontur.ru) | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) | +| [Kopo Kopo](https://kopokopo.co.ke/) | FinTech | Metrics | — | — | ClickHouse Cloud user | | [Kuaishou](https://www.kuaishou.com/) | Video | — | — | — | [ClickHouse Meetup, October 2018](https://clickhouse.com/blog/en/2018/clickhouse-community-meetup-in-beijing-on-october-28-2018/) | | [KGK Global](https://www.kgk-global.com/en/) | Vehicle monitoring | — | — | — | [Press release, June 2021](https://zoom.cnews.ru/news/item/530921) | +| [KMK Online](https://www.kmkonline.co.id/) | Digital Services | Streaming analytics | — | — | ClickHouse Cloud user | | [Kyligence](https://kyligence.io/) | Managed Service | Main Product | — | — | [Website](https://kyligence.io/all-inclusive-olap/) | | [LANCOM Systems](https://www.lancom-systems.com/) | Network Solutions | Traffic analysis | - | - | [ClickHouse Operator for Kubernetes](https://www.lancom-systems.com/), [Hacker News post](https://news.ycombinator.com/item?id=29413660) | | [laudspeaker](https://laudspeaker.com/) | Software & Technology | Open Source Messaging | — | — | [GitHub](https://github.com/laudspeaker/laudspeaker) | @@ -214,6 +224,7 @@ The following list of companies using ClickHouse and their success stories is as | [Muse Group](https://mu.se/) | Music Software | Performance Monitoring | — | — | [Blog post in Russian, January 
2021](https://habr.com/en/post/647079/) | | [MyScale](https://myscale.com/) | Software & Technology | AI Database | — | — | [Docs](https://docs.myscale.com/en/overview/) | | [NANO Corp](https://nanocorp.fr/en/) | Software & Technology | NOC as a Service | — | — | [Blog Post, July 2022](https://clickhouse.com/blog/from-experimentation-to-production-the-journey-to-supercolumn) | +| [Nansen](https://www.nansen.ai/) | Finance - Crypto | Analytics | — | — | [Press release](https://clickhouse.com/blog/clickhouse-cloud-on-google-cloud-platform-gcp-is-generally-available) | | [Nationale Databank Wegverkeers](https://www.ndw.nu/) | Software & Technology | Road Traffic Monitoring | — | — | [Presentation at Foss4G, August 2019](https://av.tib.eu/media/43434) | | [Nebius](https://nebius.com/il/docs/managed-clickhouse/) | SaaS | Main product | — | — | [Official website](https://nebius.com/il/docs/managed-clickhouse/) | | [Neocom](https://www.neocom.ai/) | Software & Technology | Sales Platform | — | — | [Hacker News, September 2023](https://news.ycombinator.com/item?id=37359122) | @@ -234,6 +245,7 @@ The following list of companies using ClickHouse and their success stories is as | [Ok.ru](https://ok.ru) | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) | | [Omnicomm](https://omnicomm.ru/) | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) | | [OneAPM](https://www.oneapm.com/) | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) | +| [Ongage](https://www.ongage.com/) | Marketing | Analytics | — | — | 
[Blog](https://clickhouse.com/blog/ongages-strategic-shift-to-clickhouse-for-real-time-email-marketing) | | [OONI](https://ooni.org/) | Open Observatory of Network Interference (OONI) | Main product | — | — | [Blog, May 2023]( https://clickhouse.com/blog/ooni-analyzes-internet-censorship-data-with-clickhouse)[Tweet August 2022](https://twitter.com/OpenObservatory/status/1558014810746265600?s=20&t=hvcDU-LIrgCApP0rZCzuoA) | | [Ookla](https://www.ookla.com/) | Software & Technology | Network Intelligence | — | — | [Presentation at J on the Beach, June 2023](https://www.youtube.com/watch?v=OZ0XpfDM8J0) | | [OpenReplay](https://openreplay.com/) | Product Analytics | Session Replay | — | — | [Docs](https://docs.openreplay.com/en/deployment/openreplay-admin/) | @@ -243,6 +255,7 @@ The following list of companies using ClickHouse and their success stories is as | [OpsVerse](https://opsverse.io/) | Observability | — | — | — | [Tweet, 2022](https://twitter.com/OpsVerse/status/1584548242100219904) | | [Oxide](https://oxide.computer/) | Hardware & Software | Server Control Plane | — | — | [GitHub Repository](https://github.com/oxidecomputer/omicron) | | [OZON](https://corp.ozon.com/) | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) | +| [Pace](https://www.paceapp.com/) | Marketing & Sales | Internal app | — | — | ClickHouse Cloud user | | [Panelbear](https://panelbear.com/) | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) | | [Papermark](https://www.papermark.io/) | Software & Technology | Document Sharing & Analytics | — | — | [Twitter, September 2023](https://twitter.com/mfts0/status/1698670144367567263) | | [Percent 百分点](https://www.percent.cn/) | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) | @@ 
-261,18 +274,21 @@ The following list of companies using ClickHouse and their success stories is as | [Postmates](https://postmates.com/) | Delivery | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=188) | | [Pragma Innovation](http://www.pragma-innovation.fr/) | Telemetry and Big Data Analysis | Main product | — | — | [Slides in English, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup18/4_pragma_innovation.pdf) | | [PRANA](https://prana-system.com/en/) | Industrial predictive analytics | Main product | — | — | [News (russian), Feb 2021](https://habr.com/en/news/t/541392/) | +| [Property Finder](https://www.propertyfinder.com/) | Real Estate | - | — | — | ClickHouse Cloud user | | [QINGCLOUD](https://www.qingcloud.com/) | Cloud services | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/4.%20Cloud%20%2B%20TSDB%20for%20ClickHouse%20张健%20QingCloud.pdf) | | [Qrator](https://qrator.net) | DDoS protection | Main product | — | — | [Blog Post, March 2019](https://blog.qrator.net/en/clickhouse-ddos-mitigation_37/) | | [Qualified](https://www.qualified.com/) | Sales Pipeline Management | Data and Messaging layers | — | — | [Job posting, Nov 2022](https://news.ycombinator.com/item?id=33425109) | +| [Qube Research & Technologies](https://www.qube-rt.com/) | FinTech | Analysis | — | — | ClickHouse Cloud user | | [QuickCheck](https://quickcheck.ng/) | FinTech | Analytics | — | — | [Blog post, May 2022](https://clickhouse.com/blog/how-quickcheck-uses-clickhouse-to-bring-banking-to-the-unbanked/) | | [R-Vision](https://rvision.pro/en/) | Information Security | — | — | — | [Article in Russian, December 2021](https://www.anti-malware.ru/reviews/R-Vision-SENSE-15) | | [Raiffeisenbank](https://www.rbinternational.com/) | Banking | Analytics | — | — | [Lecture in Russian, December 2020](https://cs.hse.ru/announcements/421965599.html) | | 
[Railway](https://railway.app/) | Software & Technology | PaaS Software Tools | — | — | [Changelog, May 2023](https://railway.app/changelog/2023-05-19-horizontal-scaling#logs-are-getting-faster) | | [Rambler](https://rambler.ru) | Internet services | Analytics | — | — | [Talk in Russian, April 2018](https://medium.com/@ramblertop/разработка-api-clickhouse-для-рамблер-топ-100-f4c7e56f3141) | +| [Rapid Delivery Analytics](https://rda.team/) | Retail | Analytics | — | — | ClickHouse Cloud user | | [RELEX](https://relexsolutions.com) | Supply Chain Planning | Forecasting | — | — | [Meetup Video, December 2022](https://www.youtube.com/watch?v=wyOSMR8l-DI&list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U&index=16) [Slides, December 2022](https://presentations.clickhouse.com/meetup65/CRUDy%20OLAP.pdf) | | [Replica](https://replicahq.com) | Urban Planning | Analytics | — | — | [Job advertisement](https://boards.greenhouse.io/replica/jobs/5547732002?gh_jid=5547732002) | | [Request Metrics](https://requestmetrics.com/) | Software & Technology | Observability | — | — | [Hacker News, May 2023](https://news.ycombinator.com/item?id=35982281) | -| [Resmo](https://replicahq.com) | Software & Technology | Cloud Security & Asset Management | 1 c7g.xlarge node, +| [Resmo](https://replicahq.com) | Software & Technology | Cloud Security & Asset Management | 1 c7g.xlarge node, 4 CPUs Graviton3 | 275 GiB | [Blog, April 2023](https://clickhouse.com/blog/how-we-used-clickhouse-to-store-opentelemetry-traces), [Changelog, July 2023](https://changelog.resmo.com/introducing-audit-logs-for-monitoring-all-system-activities-and-events-25Lrgs) | | [Retell](https://retell.cc/) | Speech synthesis | Analytics | — | — | [Blog Article, August 2020](https://vc.ru/services/153732-kak-sozdat-audiostati-na-vashem-sayte-i-zachem-eto-nuzhno) | | [Rivet](https://rivet.gg/) | Software & Technology | Gamer Server Scaling | — | — | [HackerNews, August 2023](https://news.ycombinator.com/item?id=37188659) | @@ -293,7 
+309,10 @@ The following list of companies using ClickHouse and their success stories is as | [Sentio](https://www.sentio.xyz/) | Software & Technology | Observability | — | — | [Twitter, April 2023](https://twitter.com/qiaokan/status/1650736518955438083) | | [Sentry](https://sentry.io/) | Software Development | Main product | — | — | [Blog Post in English, May 2019](https://blog.sentry.io/2019/05/16/introducing-snuba-sentrys-new-search-infrastructure) | | [seo.do](https://seo.do/) | Analytics | Main product | — | — | [Slides in English, November 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup35/CH%20Presentation-%20Metehan%20Çetinkaya.pdf) | +| [Serverless](https://www.serverless.com/) | Serverless Apps | Metrics | — | — | ClickHouse Cloud user | | [ServiceNow](https://www.servicenow.com/) | Managed Services | Qualitative Mobile Analytics | — | — | [Meetup Video, January 2023](https://www.youtube.com/watch?v=b4Pmpx3iRK4&list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U&index=6) [Slides, January 2023](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup68/Appsee%20Remodeling%20-%20ClickHouse.pdf) | +| [SESCO Trading](https://www.sescotrading.com/) | Financial | Analysis | — | — | ClickHouse Cloud user | +| [Sewer AI](https://www.sewerai.com/) | Software & Technology | - | — | — | ClickHouse Cloud user | | [SGK](http://www.sgk.gov.tr/wps/portal/sgk/tr) | Government Social Security | Analytics | — | — | [Slides in English, November 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup35/ClickHouse%20Meetup-Ramazan%20POLAT.pdf) | | [SigNoz](https://signoz.io/) | Observability Platform | Main Product | — | — | [Source code](https://github.com/SigNoz/signoz) | | [Sina](http://english.sina.com/index.html) | News | — | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/6.%20ClickHouse最佳实践%20高鹏_新浪.pdf) | @@ -312,6 +331,7 @@ The following 
list of companies using ClickHouse and their success stories is as | [SQLPad](https://getsqlpad.com/en/introduction/) | Software & Technology | Web-based SQL editor. | — | — | [GitHub, March 2023](https://github.com/sqlpad/sqlpad/blob/master/server/package.json#L43) | | [Staffbase](https://staffbase.com/en/) | Software & Technology | Internal Communications | — | — | [ClickHouse Slack, April 2023](https://clickhousedb.slack.com/archives/C04N3AU38DV/p1682781081062859) | | [Staffcop](https://www.staffcop.ru/) | Information Security | Main Product | — | — | [Official website, Documentation](https://www.staffcop.ru/sce43) | +| [Statsig](https://statsig.com/) | Software & Technology | Real-time analytics | — | — | [Video](https://clickhouse.com/videos/statsig) | | [sumsub](https://sumsub.com/) | Software & Technology | Verification platform | — | — | [Meetup, July 2022](https://www.youtube.com/watch?v=F74bBGSMwGo) | | [Suning](https://www.suning.com/) | E-Commerce | User behaviour analytics | — | — | [Blog article](https://www.sohu.com/a/434152235_411876) | | [Superology](https://superology.com/) | Software & Technology | Customer Analytics | — | — | [Blog Post, June 2022](https://clickhouse.com/blog/collecting-semi-structured-data-from-kafka-topics-using-clickhouse-kafka-engine) | @@ -322,16 +342,20 @@ The following list of companies using ClickHouse and their success stories is as | [Synerise](https://synerise.com/) | ML&AI | Feature Store | - | - | [Presentation, April 2020](https://www.slideshare.net/AndrzejMichaowski/feature-store-solving-antipatterns-in-mlsystems-232829863) | | [Synpse](https://synpse.net/) | Application Management | Main Product | - | - | [Tweet, January 2022](https://twitter.com/KRusenas/status/1483571168363880455) | | [TeamApt](https://www.teamapt.com/) | FinTech | Data Processing | — | — | [Official Website](https://www.teamapt.com/) | +| [Teamtailor](https://www.teamtailor.com/en/) | Recruitment Software | - | — | — | ClickHouse Cloud user | 
| [Teralytics](https://www.teralytics.net/) | Mobility | Analytics | — | — | [Tech blog](https://www.teralytics.net/knowledge-hub/visualizing-mobility-data-the-scalability-challenge) | | [Tencent](https://www.tencent.com) | Big Data | Data processing | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/5.%20ClickHouse大数据集群应用_李俊飞腾讯网媒事业部.pdf) | | [Tencent](https://www.tencent.com) | Messaging | Logging | — | — | [Talk in Chinese, November 2019](https://youtu.be/T-iVQRuw-QY?t=5050) | | [Tencent Music Entertainment (TME)](https://www.tencentmusic.com/) | BigData | Data processing | — | — | [Blog in Chinese, June 2020](https://cloud.tencent.com/developer/article/1637840) | +| [Tekion](https://tekion.com/) | Automotive Retail | Clickstream Analytics | — | — | ClickHouse Cloud user | | [Tesla](https://www.tesla.com/) | Electric vehicle and clean energy company | — | — | — | [Vacancy description, March 2021](https://news.ycombinator.com/item?id=26306170) | | [Theia](https://theia.so/) | Software & Technology | Threat Intelligence | — | — | [Twitter, July 2023](https://twitter.com/jreynoldsdev/status/1680639586999980033) | +| [ThirdWeb](https://thirdweb.com/) | Software & Technology | Blockchain analysis | — | — | ClickHouse Cloud user | | [Timeflow](https://timeflow.systems) | Software | Analytics | — | — | [Blog](https://timeflow.systems/why-we-moved-from-druid-to-clickhouse/ ) | | [Timeplus](https://www.timeplus.com/) | Software & Technology | Streaming Analytics | — | — | [Meetup, August 2023](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/294472987/) | | [The Guild](https://the-guild.dev/) | API Platform | Monitoring | — | — | [Blog Post, November 2022](https://clickhouse.com/blog/100x-faster-graphql-hive-migration-from-elasticsearch-to-clickhouse) [Blog](https://the-guild.dev/blog/graphql-hive-and-clickhouse) | | [Tinybird](https://www.tinybird.co/) | Real-time Data Products | 
Data processing | — | — | [Official website](https://www.tinybird.co/) | +| [TrackingPlan](https://www.trackingplan.com/) | Marketing & Sales | Monitoring | — | — | ClickHouse Cloud user | | [Traffic Stars](https://trafficstars.com/) | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) | | [Trillabit](https://www.trillabit.com/home) | Software & Technology | Business Intelligence | — | — | [Blog, January 2023](https://clickhouse.com/blog/trillabit-utilizes-the-power-of-clickhouse-for-fast-scalable-results-within-their-self-service-search-driven-analytics-offering) | | [Trip.com](https://trip.com/) | Travel Services | Logging | — | — | [Meetup, March 2023](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup71/Trip.com.pdf) | @@ -346,6 +370,7 @@ The following list of companies using ClickHouse and their success stories is as | [Valueleaf Services Pvt.Ltd](http://valueleaf.com/) | Software & Technology | Martech platform, Ads platform and Loan aggregator platform | — | — | [ClickHouse Slack, April 2023](https://clickhousedb.slack.com/archives/C04N3AU38DV/p1681122299263959) | | [Vantage](https://www.vantage.sh/) | Software & Technology | Cloud Cost Management | — | — | [Meetup, April 2023](https://www.youtube.com/watch?v=gBgXcHM_ldc) , [ClickHouse Blog, June 2023](https://clickhouse.com/blog/nyc-meetup-report-vantages-journey-from-redshift-and-postgres-to-clickhouse) | | [Vercel](https://vercel.com/) | Traffic and Performance Analytics | — | — | — | Direct reference, October 2021 | +| [Vidazoo](https://www.vidazoo.com/) | Advertising | Analytics | — | — | ClickHouse Cloud user | | [VKontakte](https://vk.com) | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) | | 
[VKontech](https://vkontech.com/) | Distributed Systems | Migrating from MongoDB | - | - | [Blog, January 2022](https://vkontech.com/migrating-your-reporting-queries-from-a-general-purpose-db-mongodb-to-a-data-warehouse-clickhouse-performance-overview/) | | [VMware](https://www.vmware.com/) | Cloud | VeloCloud, SDN | — | — | [Product documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/8.3/com.vmware.vcom.metrics.doc/GUID-A9AD72E1-C948-4CA2-971B-919385AB3CA8.html) | @@ -358,6 +383,7 @@ The following list of companies using ClickHouse and their success stories is as | [Wildberries](https://www.wildberries.ru/) | E-commerce | | — | — | [Official website](https://it.wildberries.ru/) | | [Wisebits](https://wisebits.com/) | IT Solutions | Analytics | — | — | [Slides in Russian, May 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) | | [Workato](https://www.workato.com/) | Automation Software | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=334) | +| [Wowza](https://www.wowza.com/) | Video Platform | Streaming Analytics | — | — | ClickHouse Cloud user | | [WSPR Live](https://wspr.live/) | Software & Technology | WSPR Spot Data | — | — | [Twitter, April 2023](https://twitter.com/HB9VQQ/status/1652723207475015680) | | [Wundergraph](https://wundergraph.com/) | Software & Technology | API Platform | — | — | [Twitter, February 2023](https://twitter.com/dustindeus/status/1628757807913750531) | | [Xenoss](https://xenoss.io/) | Martech, Adtech development | — | — | — | [Official website](https://xenoss.io/big-data-solution-development)| diff --git a/docs/en/cloud/bestpractices/avoidnullablecolumns.md b/docs/en/cloud/bestpractices/avoidnullablecolumns.md index ab2e28ea87c..c798425c7ec 100644 --- a/docs/en/cloud/bestpractices/avoidnullablecolumns.md +++ b/docs/en/cloud/bestpractices/avoidnullablecolumns.md @@ -4,7 +4,7 @@ sidebar_label: Avoid Nullable Columns title: Avoid Nullable Columns --- 
-[`Nullable` column](/docs/en/sql-reference/data-types/nullable/) (e.g. `Nullable(String))` creates a separate column of `UInt8` type. This additional column has to be processed every time a user works with a nullable column. This leads to additional storage space used and almost always negatively affects performance. +[`Nullable` column](/docs/en/sql-reference/data-types/nullable/) (e.g. `Nullable(String)`) creates a separate column of `UInt8` type. This additional column has to be processed every time a user works with a nullable column. This leads to additional storage space used and almost always negatively affects performance. To avoid `Nullable` columns, consider setting a default value for that column. For example, instead of: diff --git a/docs/en/cloud/reference/changelog.md b/docs/en/cloud/reference/changelog.md index 73a74681a49..d49b52c5fee 100644 --- a/docs/en/cloud/reference/changelog.md +++ b/docs/en/cloud/reference/changelog.md @@ -5,6 +5,39 @@ title: Cloud Changelog --- In addition to this ClickHouse Cloud changelog, please see the [Cloud Compatibility](/docs/en/cloud/reference/cloud-compatibility.md) page. + +## October 19, 2023 + +This release brings usability and performance improvements in the SQL console, better IP data type handling in the Metabase connector, and new functionality in the Java and Node.js clients. + +### Console changes +- Improved usability of the SQL console (e.g. 
preserve column width between query executions) +- Improved performance of the SQL console + +### Integrations changes +- Java client: + - Switched the default network library to improve performance and reuse open connections + - Added proxy support + - Added support for secure connections using Trust Store +- Node.js client: Fixed keep-alive behavior for insert queries +- Metabase: Fixed IPv4/IPv6 column serialization + +## September 28, 2023 + +This release brings general availability of ClickPipes for Kafka, Confluent Cloud, and Amazon MSK and the Kafka Connect ClickHouse Sink, self-service workflow to secure access to Amazon S3 via IAM roles, and AI-assisted query suggestions (private preview). + +### Console changes +- Added a self-service workflow to secure [access to Amazon S3 via IAM roles](/docs/en/cloud/manage/security/secure-s3) +- Introduced AI-assisted query suggestions in private preview (please [contact ClickHouse Cloud support](https://clickhouse.cloud/support) to try it out!) + +### Integrations changes +- Announced general availability of ClickPipes - a turnkey data ingestion service - for Kafka, Confluent Cloud, and Amazon MSK (see the [release blog](https://clickhouse.com/blog/clickpipes-is-generally-available)) +- Reached general availability of Kafka Connect ClickHouse Sink + - Extended support for customized ClickHouse settings using `clickhouse.settings` property + - Improved deduplication behavior to account for dynamic fields + - Added support for `tableRefreshInterval` to re-fetch table changes from ClickHouse +- Fixed an SSL connection issue and type mappings between [PowerBI](/docs/en/integrations/powerbi) and ClickHouse data types + ## September 7, 2023 This release brings the beta release of the PowerBI Desktop official connector, improved credit card payment handling for India, and multiple improvements across supported language clients. 
@@ -20,7 +53,6 @@ This release brings the beta release of the PowerBI Desktop official connector, - Node.js client: added default_format setting support - Golang client: fixed bool type handling, removed string limits - ## Aug 24, 2023 This release adds support for the MySQL interface to the ClickHouse database, introduces a new official PowerBI connector, adds a new “Running Queries” view in the cloud console, and updates the ClickHouse version to 23.7. diff --git a/docs/en/cloud/reference/cloud-compatibility.md b/docs/en/cloud/reference/cloud-compatibility.md index 62c94dedf5c..9055c168dd7 100644 --- a/docs/en/cloud/reference/cloud-compatibility.md +++ b/docs/en/cloud/reference/cloud-compatibility.md @@ -27,9 +27,10 @@ For the most part, the DDL syntax of ClickHouse Cloud should match what is avail - Support for `CREATE AS SELECT`, which is currently not available. As a workaround, we suggest using `CREATE ... EMPTY ... AS SELECT` and then inserting into that table (see [this blog](https://clickhouse.com/blog/getting-data-into-clickhouse-part-1) for an example). - Some experimental syntax may be disabled, for instance, `ALTER TABLE … MODIFY QUERY` statement. - Some introspection functionality may be disabled for security purposes, for example, the `addressToLine` SQL function. + - Do not use `ON CLUSTER` parameters in ClickHouse Cloud - these are not needed. While these are mostly no-op functions, they can still cause an error if you are trying to use [macros](https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings#macros). Macros often do not work and are not needed in ClickHouse Cloud. ### Database and table engines -ClickHouse Cloud provides a highly-available, replicated service by default. As a result, the database engine is Replicated and the following table engines are supported: +ClickHouse Cloud provides a highly-available, replicated service by default. 
As a result, all database and table engines are "Replicated": - ReplicatedMergeTree (default, when none is specified) - ReplicatedSummingMergeTree - ReplicatedAggregatingMergeTree @@ -57,6 +58,8 @@ ClickHouse Cloud provides a highly-available, replicated service by default. As - PostgreSQL - S3 +Please note: in ClickHouse Cloud, you do not need to add the "Replicated" term to your specified database or table engine. All *MergeTree tables are replicated in ClickHouse Cloud automatically. + ### Interfaces ClickHouse Cloud supports HTTPS and Native interfaces. Support for more interfaces such as MySQL and Postgres is coming soon. @@ -83,20 +86,20 @@ Experimental features can be self-enabled by users in Development services. They ### Kafka -The [Kafka Table Engine](/docs/en/integrations/data-ingestion/kafka/index.md) is not available in ClickHouse Cloud. Instead, we recommend relying on architectures that decouple the Kafka connectivity components from the ClickHouse service to achieve a separation of concerns. We recommend considering the alternatives listed in the [Kafka User Guide](/docs/en/integrations/data-ingestion/kafka/index.md) +The [Kafka Table Engine](/docs/en/integrations/data-ingestion/kafka/index.md) is not generally available in ClickHouse Cloud. Instead, we recommend relying on architectures that decouple the Kafka connectivity components from the ClickHouse service to achieve a separation of concerns. We recommend [ClickPipes](https://clickhouse.com/cloud/clickpipes) for pulling data from a Kafka stream. Alternatively, consider the push-based alternatives listed in the [Kafka User Guide](/docs/en/integrations/data-ingestion/kafka/index.md) ## Operational Defaults and Considerations The following are default settings for ClickHouse Cloud services. In some cases, these settings are fixed to ensure the correct operation of the service, and in others, they can be adjusted. 
### Operational limits -### `max_parts_in_total: 10,000` +#### `max_parts_in_total: 10,000` The default value of the `max_parts_in_total` setting for MergeTree tables has been lowered from 100,000 to 10,000. The reason for this change is that we observed that a large number of data parts is likely to cause a slow startup time of services in the cloud. A large number of parts usually indicate a choice of too granular partition key, which is typically done accidentally and should be avoided. The change of default will allow the detection of these cases earlier. -### `max_concurrent_queries: 1,000` +#### `max_concurrent_queries: 1,000` Increased this per-server setting from the default of 100 to 1000 to allow for more concurrency. This will result in 2,000 concurrent queries for development services and 3,000 for production. -### `max_table_size_to_drop: 1,000,000,000,000` +#### `max_table_size_to_drop: 1,000,000,000,000` Increased this setting from 50GB to allow for dropping of tables/partitions up to 1TB. 
### System settings @@ -112,7 +115,7 @@ The table below summarizes our efforts to expand some of the capabilities descri |-------------------------------------------------------------------------|:----------------------------------------| |Dictionary support: PostgreSQL, MySQL, remote and local ClickHouse servers, Redis, MongoDB and HTTP sources | **Added in GA** | |SQL user-defined functions (UDFs) | **Added in GA** | -|MySQL and Postgres engine | **Added in GA** | +|MySQL and PostgreSQL engine | **Added in GA** | |Engines for SQLite, ODBC, JDBC, Redis, RabbitMQ, HDFS, and Hive | ✔ | |MySQL & Postgres interfaces | ✔ | |Kafka Table Engine | Not recommended; see alternatives above | diff --git a/docs/en/cloud/security/compliance-and-certification.md b/docs/en/cloud/security/compliance-and-certification.md new file mode 100644 index 00000000000..6c114a3ce78 --- /dev/null +++ b/docs/en/cloud/security/compliance-and-certification.md @@ -0,0 +1,17 @@ +--- +slug: /en/manage/security/compliance-and-certification +sidebar_label: Compliance and Certification +title: Compliance and Certification +--- + +# Compliance and Certification + +ClickHouse Cloud adheres to the following compliance frameworks: +- [SOC 2](https://secureframe.com/hub/soc-2/what-is-soc-2) +- [ISO 27001](https://www.iso.org/standard/27001) +- [GDPR](https://gdpr-info.eu/) +- [CCPA](https://oag.ca.gov/privacy/ccpa) + +We also provide a secure method to pay by credit card that is compliant with [PCI SAQ A v4.0](https://www.pcisecuritystandards.org/document_library/). + +To download detailed reports, please see our [Trust Center](https://trust.clickhouse.com/). 
diff --git a/docs/en/guides/creating-tables.md b/docs/en/guides/creating-tables.md index 3d4f4c89306..1c89f9dee6b 100644 --- a/docs/en/guides/creating-tables.md +++ b/docs/en/guides/creating-tables.md @@ -5,7 +5,7 @@ sidebar_label: Creating Tables # Creating Tables in ClickHouse - Like most database management systems, ClickHouse logically groups tables into **databases**. Use the `CREATE DATABASE` command to create a new database in ClickHouse: + Like most databases, ClickHouse logically groups tables into **databases**. Use the `CREATE DATABASE` command to create a new database in ClickHouse: ```sql CREATE DATABASE IF NOT EXISTS helloworld @@ -62,4 +62,4 @@ In the example above, `my_first_table` is a `MergeTree` table with four columns: :::tip For more details, check out the [Creating Databases and Tables](https://learn.clickhouse.com/visitor_catalog_class/show/1043458/) training course in ClickHouse Academy. -::: \ No newline at end of file +::: diff --git a/docs/en/guides/inserting-data.md b/docs/en/guides/inserting-data.md index 37ad6744706..ac9da75be76 100644 --- a/docs/en/guides/inserting-data.md +++ b/docs/en/guides/inserting-data.md @@ -22,10 +22,15 @@ Let's verify it worked - you should see the four rows of data that were inserted SELECT * FROM helloworld.my_first_table ``` -:::tip -Insert a large number of rows per batch - tens of thousands or even millions of rows at once. Don't worry - ClickHouse can easily handle that type of volume! +:::note Need help inserting large datasets? +If you need help inserting large datasets or encounter any errors when importing data into ClickHouse Cloud, please contact us at support@clickhouse.com and we can assist. ::: + +## Insert large batches + +Insert a large number of rows per batch - tens of thousands or even millions of rows at once. Inserting in batches optimizes for insert performance. Don't worry - ClickHouse can easily handle that type of volume! 
+ :::tip If you can not insert a lot of rows at once and you are using an HTTP client, use the [`async_insert` setting](../operations/settings/settings.md#async-insert), which batches your smaller inserts before inserting them into the table. ::: diff --git a/docs/en/guides/sre/keeper/index.md b/docs/en/guides/sre/keeper/index.md index 8d0f5ae08e4..e2267013bc9 100644 --- a/docs/en/guides/sre/keeper/index.md +++ b/docs/en/guides/sre/keeper/index.md @@ -379,7 +379,7 @@ The following features are available: ### Migration from ZooKeeper {#migration-from-zookeeper} -Seamlessly migration from ZooKeeper to ClickHouse Keeper is impossible you have to stop your ZooKeeper cluster, convert data and start ClickHouse Keeper. `clickhouse-keeper-converter` tool allows converting ZooKeeper logs and snapshots to ClickHouse Keeper snapshot. It works only with ZooKeeper > 3.4. Steps for migration: +Seamless migration from ZooKeeper to ClickHouse Keeper is not possible. You have to stop your ZooKeeper cluster, convert data, and start ClickHouse Keeper. `clickhouse-keeper-converter` tool allows converting ZooKeeper logs and snapshots to ClickHouse Keeper snapshot. It works only with ZooKeeper > 3.4. Steps for migration: 1. Stop all ZooKeeper nodes. diff --git a/docs/en/integrations/data-ingestion/clickpipes/index.md b/docs/en/integrations/data-ingestion/clickpipes/index.md index 0835f22b9af..04d78da61ca 100644 --- a/docs/en/integrations/data-ingestion/clickpipes/index.md +++ b/docs/en/integrations/data-ingestion/clickpipes/index.md @@ -142,7 +142,6 @@ Nullable versions of the above are also supported with these exceptions: ## Current Limitations -- During the Private Preview phase, ClickPipes is available only on the services backed by Amazon Web Services, in the `us-east-2` and `eu-central-1` regions. - Private Link support isn't currently available for ClickPipes but will be released in the near future. 
## List of Static IPs diff --git a/docs/en/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md b/docs/en/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md index d350e5d1974..917a086d667 100644 --- a/docs/en/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md +++ b/docs/en/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md @@ -22,7 +22,7 @@ import ConnectionDetails from '@site/docs/en/_snippets/_gather_your_details_http ## 3. Download the ClickHouse JDBC driver 1. Visit the ClickHouse JDBC driver release page on GitHub and look for the latest JDBC release version -2. In the release version, click on "Show all xx assets" and look for the JAR file containing the keyword "shaded" or "all", for example, `clickhouse-jdbc-0.4.6-all.jar` +2. In the release version, click on "Show all xx assets" and look for the JAR file containing the keyword "shaded" or "all", for example, `clickhouse-jdbc-0.5.0-all.jar` 3. Place the JAR file in a folder accessible by Apache NiFi and take note of the absolute path ## 4. 
Add DBCPConnectionPool Controller Service and configure its properties diff --git a/docs/en/integrations/data-ingestion/kafka/confluent/index.md b/docs/en/integrations/data-ingestion/kafka/confluent/index.md new file mode 100644 index 00000000000..a054b1d7c5a --- /dev/null +++ b/docs/en/integrations/data-ingestion/kafka/confluent/index.md @@ -0,0 +1,13 @@ +--- +sidebar_label: Confluent Platform +sidebar_position: 1 +slug: /en/integrations/kafka/cloud/confluent +description: Kafka Connectivity with Confluent Cloud +--- + +# Integrating Confluent Cloud with ClickHouse + +Confluent platform provides two options to integration with ClickHouse + +* [ClickHouse Connect Sink on Confluent Cloud](./custom-connector.md) using the custom connectors feature +* [HTTP Sink Connector for Confluent Platform](./kafka-connect-http.md) that integrates Apache Kafka with an API via HTTP or HTTPS \ No newline at end of file diff --git a/docs/en/integrations/data-ingestion/kafka/index.md b/docs/en/integrations/data-ingestion/kafka/index.md index cb7de64fee5..657fb316acf 100644 --- a/docs/en/integrations/data-ingestion/kafka/index.md +++ b/docs/en/integrations/data-ingestion/kafka/index.md @@ -7,47 +7,30 @@ description: Introduction to Kafka with ClickHouse # Integrating Kafka with ClickHouse -[Apache Kafka](https://kafka.apache.org/) is an open-source distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications. In most cases involving Kafka and ClickHouse, users will wish to insert Kafka based data into ClickHouse - although the reverse is supported. Below we outline several options for both use cases, identifying the pros and cons of each approach. 
+[Apache Kafka](https://kafka.apache.org/) is an open-source distributed event streaming platform used by thousands of companies for high-performance data pipelines, streaming analytics, data integration, and mission-critical applications. In most cases involving Kafka and ClickHouse, users will wish to insert Kafka based data into ClickHouse. Below we outline several options for this use case, identifying the pros and cons of each approach. -For those who do not have a Kafka instance to hand, we recommend [Confluent Cloud](https://www.confluent.io/get-started/), which offers a free tier adequate for testing these examples. For self-managed alternatives, consider the [Confluent for Kubernetes](https://docs.confluent.io/operator/current/overview.html) or [here](https://docs.confluent.io/platform/current/installation/installing_cp/overview.html) for non-Kubernetes environments. - -## Assumptions - -* You are familiar with the Kafka fundamentals, such as producers, consumers and topics. -* You have a topic prepared for these examples. We assume all data is stored in Kafka as JSON, although the principles remain the same if using Avro. -* We utilise the excellent [kcat](https://github.com/edenhill/kcat) (formerly kafkacat) in our examples to publish and consume Kafka data. -* Whilst we reference some python scripts for loading sample data, feel free to adapt the examples to your dataset. -* You are broadly familiar with ClickHouse materialized views. - -# Choosing an option +## Choosing an option When integrating Kafka with ClickHouse, you will need to make early architectural decisions about the high-level approach used. We outline the most common strategies below: -### ClickPipes for Kafka (new) -* [ClickPipes](../clickpipes/index.md) offers the easiest and most intuitive way to ingest data into ClickHouse Cloud. With support for Apache Kafka and Confluent today, and many more data sources coming soon. 
+### ClickPipes for Kafka (ClickHouse Cloud) +* [**ClickPipes**](../clickpipes/index.md) offers the easiest and most intuitive way to ingest data into ClickHouse Cloud. With support for Apache Kafka, Confluent Cloud and Amazon MSK today, and many more data sources coming soon. -:::note -ClickPipes is a native capability of [ClickHouse Cloud](https://clickhouse.com/cloud) currently under private preview. -::: -### Cloud-based Kafka Connectivity -* [**Confluent Cloud**](https://confluent.cloud) - Confluent platform provides an option to upload and [run ClickHouse Connector Sink on Confluent Cloud](./confluent/custom-connector.md) or use [HTTP Sink Connector for Confluent Platform](./confluent/kafka-connect-http.md) that integrates Apache Kafka with an API via HTTP or HTTPS. +### 3rd-Party Cloud-based Kafka Connectivity +* [**Confluent Cloud**](./confluent/index.md) - Confluent platform provides an option to upload and [run ClickHouse Connector Sink on Confluent Cloud](./confluent/custom-connector.md) or use [HTTP Sink Connector for Confluent Platform](./confluent/kafka-connect-http.md) that integrates Apache Kafka with an API via HTTP or HTTPS. -* [**Amazon MSK**](./msk/index.md) - support Amazon MSK Connect framework to forward data from Apache Kafka clusters to external systems such as ClickHouse. You can install **ClickHouse Kafka Connect** on Amazon MSK. +* [**Amazon MSK**](./msk/index.md) - support Amazon MSK Connect framework to forward data from Apache Kafka clusters to external systems such as ClickHouse. You can install ClickHouse Kafka Connect on Amazon MSK. ### Self-managed Kafka Connectivity -* [**Kafka Connect**](./kafka-clickhouse-connect-sink.md) - Kafka Connect is a free, open-source component of Apache Kafka® that works as a centralized data hub for simple data integration between Kafka and other data systems. Connectors provide a simple means of scalably and reliably streaming data to and from Kafka. 
Source Connectors inserts data to Kafka topics from other systems, whilst Sink Connectors delivers data from Kafka topics into other data stores such as ClickHouse. +* [**Kafka Connect**](./kafka-clickhouse-connect-sink.md) - Kafka Connect is a free, open-source component of Apache Kafka that works as a centralized data hub for simple data integration between Kafka and other data systems. Connectors provide a simple means of scalably and reliably streaming data to and from Kafka. Source Connectors insert data to Kafka topics from other systems, whilst Sink Connectors deliver data from Kafka topics into other data stores such as ClickHouse. * [**Vector**](./kafka-vector.md) - Vector is a vendor agnostic data pipeline. With the ability to read from Kafka, and send events to ClickHouse, this represents a robust integration option. * [**JDBC Connect Sink**](./kafka-connect-jdbc.md) - The Kafka Connect JDBC Sink connector allows you to export data from Kafka topics to any relational database with a JDBC driver * **Custom code** - Custom code using respective client libraries for Kafka and ClickHouse may be appropriate cases where custom processing of events is required. This is beyond the scope of this documentation. +* [**Kafka table engine**](./kafka-table-engine.md) provides a Native ClickHouse integration (not available on ClickHouse Cloud). This table engine **pulls** data from the source system. This requires ClickHouse to have direct access to Kafka. -### Kafka table engine -* The [Kafka table engine](./kafka-table-engine.md) provides a Native ClickHouse integration. This table engine **pulls** data from the source system. This requires ClickHouse to have direct access to Kafka. -:::note -Kafka table engine is not supported on [ClickHouse Cloud](https://clickhouse.com/cloud). Please consider one of the alternatives listed on the page. 
-::: ### Choosing an approach It comes down to a few decision points: @@ -58,3 +41,14 @@ It comes down to a few decision points: * **External enrichment** - Whilst messages can be manipulated before insertion into ClickHouse, through the use of functions in the select statement of the materialized view, users may prefer to move complex enrichment external to ClickHouse. * **Data flow direction** - Vector only supports the transfer of data from Kafka to ClickHouse. + + +## Assumptions + +The user guides linked above assume the following: + +* You are familiar with the Kafka fundamentals, such as producers, consumers and topics. +* You have a topic prepared for these examples. We assume all data is stored in Kafka as JSON, although the principles remain the same if using Avro. +* We utilise the excellent [kcat](https://github.com/edenhill/kcat) (formerly kafkacat) in our examples to publish and consume Kafka data. +* Whilst we reference some python scripts for loading sample data, feel free to adapt the examples to your dataset. +* You are broadly familiar with ClickHouse materialized views. \ No newline at end of file diff --git a/docs/en/integrations/data-ingestion/kafka/msk/index.md b/docs/en/integrations/data-ingestion/kafka/msk/index.md index 129c2958476..8b1bceb6823 100644 --- a/docs/en/integrations/data-ingestion/kafka/msk/index.md +++ b/docs/en/integrations/data-ingestion/kafka/msk/index.md @@ -12,7 +12,6 @@ import ConnectionDetails from '@site/docs/en/_snippets/_gather_your_details_http We assume: * you are familiar with [ClickHouse Connector Sink](../kafka-clickhouse-connect-sink.md),Amazon MSK and MSK Connectors. We recommend the Amazon MSK [Getting Started guide](https://docs.aws.amazon.com/msk/latest/developerguide/getting-started.html) and [MSK Connect guide](https://docs.aws.amazon.com/msk/latest/developerguide/msk-connect.html). * The MSK broker is publicly accessible. 
See the [Public Access](https://docs.aws.amazon.com/msk/latest/developerguide/public-access.html) section of the Developer Guide. - * If you wish to allow-list the static IPs for ClickPipes, they can be found [here](../clickpipes/index.md#list-of-static-ips). ## The official Kafka connector from ClickHouse with Amazon MSK diff --git a/docs/en/integrations/data-visualization.md b/docs/en/integrations/data-visualization.md index 9484f81a595..dc7f2a407c8 100644 --- a/docs/en/integrations/data-visualization.md +++ b/docs/en/integrations/data-visualization.md @@ -33,20 +33,21 @@ Now that your data is in ClickHouse, it's time to analyze it, which often involv ## ClickHouse Cloud Compatibility with Data Visualization Tools -| Tool | Supported via | Tested | Documented | Comment | -|-----------------------------------------------------------------------|-------------------------------|--------|------------|--------------------------------------------------------------------------------------------------------------------------| -| [Apache Superset](./data-visualization/superset-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | -| [AWS QuickSight](./data-visualization/quicksight-and-clickhouse.md) | MySQL interface | ✅ | ✅ | Works with some limitations, see [the documentation](./data-visualization/quicksight-and-clickhouse.md) for more details | -| [Deepnote](./data-visualization/deepnote.md) | Native connector | ✅ | ✅ | | -| [Explo](./data-visualization/explo-and-clickhouse.md) | Native connector | ✅ | ✅ | | -| [Grafana](./data-visualization/grafana-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | -| [Looker](./data-visualization/looker-and-clickhouse.md) | Native connector | ✅ | ✅ | Works with some limitations, see [the documentation](./data-visualization/looker-and-clickhouse.md) for more details | -| Looker | MySQL interface | 🚧 | ❌ | | -| [Looker Studio](./data-visualization/looker-studio-and-clickhouse.md) | MySQL interface | ✅ | ✅ | | -| 
[Metabase](./data-visualization/metabase-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | -| [Power BI Desktop](./data-visualization/powerbi-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | Via ODBC, not suitable for large workloads (no direct query mode) | -| Power BI service | ClickHouse official connector | 🚧 | ❌ | | -| [Rocket BI](./data-visualization/rocketbi-and-clickhouse.md) | Native connector | ✅ | ❌ | | -| [Tableau Desktop](./data-visualization/tableau-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | Certification in progress | -| Tableau Online | MySQL interface | 🚧 | ❌ | Compatibility fix in progress | -| [Zing Data](./data-visualization/zingdata-and-clickhouse.md) | Native connector | ✅ | ✅ | | \ No newline at end of file +| Tool | Supported via | Tested | Documented | Comment | +|-------------------------------------------------------------------------|-------------------------------|--------|------------|------------------------------------------------------------------------------------------------------------------------------| +| [Apache Superset](./data-visualization/superset-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | +| [AWS QuickSight](./data-visualization/quicksight-and-clickhouse.md) | MySQL interface | ✅ | ✅ | Works with some limitations, see [the documentation](./data-visualization/quicksight-and-clickhouse.md) for more details | +| [Deepnote](./data-visualization/deepnote.md) | Native connector | ✅ | ✅ | | +| [Explo](./data-visualization/explo-and-clickhouse.md) | Native connector | ✅ | ✅ | | +| [Grafana](./data-visualization/grafana-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | +| [Hashboard](./data-visualization/hashboard-and-clickhouse.md) | Native connector | ✅ | ✅ | | +| [Looker](./data-visualization/looker-and-clickhouse.md) | Native connector | ✅ | ✅ | Works with some limitations, see [the documentation](./data-visualization/looker-and-clickhouse.md) for more 
details | +| Looker | MySQL interface | 🚧 | ❌ | | +| [Looker Studio](./data-visualization/looker-studio-and-clickhouse.md) | MySQL interface | ✅ | ✅ | | +| [Metabase](./data-visualization/metabase-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | | +| [Power BI Desktop](./data-visualization/powerbi-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | Via ODBC, not suitable for large workloads (no direct query mode) | +| Power BI service | ClickHouse official connector | 🚧 | ❌ | | +| [Rocket BI](./data-visualization/rocketbi-and-clickhouse.md) | Native connector | ✅ | ❌ | | +| [Tableau Desktop](./data-visualization/tableau-and-clickhouse.md) | ClickHouse official connector | ✅ | ✅ | Certification in progress | +| [Tableau Online](./data-visualization/tableau-online-and-clickhouse.md) | MySQL interface | ✅ | ✅ | Works with some limitations, see [the documentation](./data-visualization/tableau-online-and-clickhouse.md) for more details | +| [Zing Data](./data-visualization/zingdata-and-clickhouse.md) | Native connector | ✅ | ✅ | | diff --git a/docs/en/integrations/data-visualization/hashboard-and-clickhouse.md b/docs/en/integrations/data-visualization/hashboard-and-clickhouse.md new file mode 100644 index 00000000000..6b74ce12ae0 --- /dev/null +++ b/docs/en/integrations/data-visualization/hashboard-and-clickhouse.md @@ -0,0 +1,47 @@ +--- +sidebar_label: Hashboard +sidebar_position: 132 +slug: /en/integrations/hashboard +keywords: [clickhouse, hashboard, connect, integrate, ui, analytics] +description: Hashboard is a robust analytics platform that can be easily integrated with ClickHouse for real-time data analysis. +--- +import ConnectionDetails from '@site/docs/en/_snippets/_gather_your_details_native.md'; + +# Connecting ClickHouse to Hashboard + +[Hashboard](https://hashboard.com) is an interactive data exploration tool that enables anyone in your organization to track metrics and discover actionable insights. 
Hashboard issues live SQL queries to your ClickHouse database and is particularly useful for self-serve, adhoc data exploration use cases. + + +Hashboard data explorer + +
+ +This guide will walk you through the steps to connect Hashboard with your ClickHouse instance. This information is also available on Hashboard's [ClickHouse integration documentation](https://docs.hashboard.com/docs/database-connections/clickhouse). + + +## Pre-requisites + +- A ClickHouse database either hosted on your own infrastructure or on [ClickHouse Cloud](https://clickhouse.com/). +- A [Hashboard account](https://hashboard.com/getAccess) and project. + +## Steps to Connect Hashboard to ClickHouse + +### 1. Gather Your Connection Details + + + +### 2. Add a New Database Connection in Hashboard + +1. Navigate to your [Hashboard project](https://hashboard.com/app). +2. Open the Settings page by clicking the gear icon in the side navigation bar. +3. Click `+ New Database Connection`. +4. In the modal, select "ClickHouse." +5. Fill in the **Connection Name**, **Host**, **Port**, **Username**, **Password**, and **Database** fields with the information gathered earlier. +6. Click "Test" to validate that the connection is configured successfully. +7. Click "Add" + +Your ClickHouse database is now be connected to Hashboard and you can proceed by building [Data Models](https://docs.hashboard.com/docs/data-modeling/add-data-model), [Explorations](https://docs.hashboard.com/docs/visualizing-data/explorations), [Metrics](https://docs.hashboard.com/docs/metrics), and [Dashboards](https://docs.hashboard.com/docs/dashboards). See the corresponding Hashboard documentation for more detail on these features. + +## Learn More + +For more advanced features and troubleshooting, visit [Hashboard's documentation](https://docs.hashboard.com/). 
diff --git a/docs/en/integrations/data-visualization/images/hashboard_01.png b/docs/en/integrations/data-visualization/images/hashboard_01.png new file mode 100644 index 00000000000..d452a071971 Binary files /dev/null and b/docs/en/integrations/data-visualization/images/hashboard_01.png differ diff --git a/docs/en/integrations/data-visualization/images/tableau_online_01.png b/docs/en/integrations/data-visualization/images/tableau_online_01.png new file mode 100644 index 00000000000..63c44f72a4f Binary files /dev/null and b/docs/en/integrations/data-visualization/images/tableau_online_01.png differ diff --git a/docs/en/integrations/data-visualization/images/tableau_online_02.png b/docs/en/integrations/data-visualization/images/tableau_online_02.png new file mode 100644 index 00000000000..ee62094faf3 Binary files /dev/null and b/docs/en/integrations/data-visualization/images/tableau_online_02.png differ diff --git a/docs/en/integrations/data-visualization/images/tableau_online_03.png b/docs/en/integrations/data-visualization/images/tableau_online_03.png new file mode 100644 index 00000000000..4f27e91a15a Binary files /dev/null and b/docs/en/integrations/data-visualization/images/tableau_online_03.png differ diff --git a/docs/en/integrations/data-visualization/images/tableau_online_04.png b/docs/en/integrations/data-visualization/images/tableau_online_04.png new file mode 100644 index 00000000000..df29d71ebcd Binary files /dev/null and b/docs/en/integrations/data-visualization/images/tableau_online_04.png differ diff --git a/docs/en/integrations/data-visualization/looker-studio-and-clickhouse.md b/docs/en/integrations/data-visualization/looker-studio-and-clickhouse.md index 43283cace10..cda26fe382e 100644 --- a/docs/en/integrations/data-visualization/looker-studio-and-clickhouse.md +++ b/docs/en/integrations/data-visualization/looker-studio-and-clickhouse.md @@ -1,7 +1,7 @@ --- sidebar_label: Looker Studio slug: /en/integrations/lookerstudio -keywords: [clickhouse, 
looker, studio, connect, integrate, ui] +keywords: [clickhouse, looker, studio, connect, mysql, integrate, ui] description: Looker Studio, formerly Google Data Studio, is an online tool for converting data into customizable informative reports and dashboards. --- diff --git a/docs/en/integrations/data-visualization/quicksight-and-clickhouse.md b/docs/en/integrations/data-visualization/quicksight-and-clickhouse.md index d2744e608b3..62509bbfa41 100644 --- a/docs/en/integrations/data-visualization/quicksight-and-clickhouse.md +++ b/docs/en/integrations/data-visualization/quicksight-and-clickhouse.md @@ -1,7 +1,7 @@ --- sidebar_label: QuickSight slug: /en/integrations/quicksight -keywords: [clickhouse, aws, amazon, quicksight, connect, integrate, ui] +keywords: [clickhouse, aws, amazon, quicksight, mysql, connect, integrate, ui] description: Amazon QuickSight powers data-driven organizations with unified business intelligence (BI) at hyperscale. --- diff --git a/docs/en/integrations/data-visualization/tableau-and-clickhouse.md b/docs/en/integrations/data-visualization/tableau-and-clickhouse.md index 1a31d700979..8bb71708674 100644 --- a/docs/en/integrations/data-visualization/tableau-and-clickhouse.md +++ b/docs/en/integrations/data-visualization/tableau-and-clickhouse.md @@ -17,7 +17,7 @@ Tableau can use ClickHouse databases and tables as a data source. This requires 2. Download and install Tableau desktop. -3. Download the latest version of the ANALYTIKA PLUS clickhouse-tableau-connector-jdbc TACO connector. +3. Download the latest version of the clickhouse-tableau-connector-jdbc TACO connector. 4. Store the TACO connector in the following folder (based on your OS): - macOS: `~/Documents/My Tableau Repository/Connectors` - Windows: `C:\Users[Windows User]\Documents\My Tableau Repository\Connectors` @@ -38,11 +38,11 @@ Now that you have the driver and connector in the appropriate folders on your ma 1. Start Tableau. (If you already had it running, then restart it.) 
-2. From the left-side menu, click on **More** under the **To a Server** section. If everything worked properly, you should see **ClickHouse (JDBC) by ANALYTIKA PLUS** in the list of installed connectors: +2. From the left-side menu, click on **More** under the **To a Server** section. If everything worked properly, you should see **ClickHouse JDBC** in the list of installed connectors: - ![ClickHouse (JDBC) by ANALYTIKA PLUS](./images/tableau_connecttoserver.png) + ![ClickHouse JDBC](./images/tableau_connecttoserver.png) -3. Click on **ClickHouse (JDBC) by ANALYTIKA PLUS** and a dialog window pops up. Enter the following details: +3. Click on **ClickHouse JDBC** and a dialog window pops up. Enter the following details: | Setting | Value | | ----------- | ----------- | @@ -122,4 +122,4 @@ You should see the following: Tableau is great, and we love that it connects so nicely to ClickHouse! If you are new to Tableau, check out their documentation for help on building dashboards and visualizations. ::: -**Summary:** You can connect Tableau to ClickHouse using the generic ODBC/JDBC ClickHouse driver, but we really like how this tool from ANALYTIKA PLUS simplifies the process of setting up the connection. If you have any issues with the connector, feel free to reach out to ANALYTIKA PLUS on GitHub. +**Summary:** You can connect Tableau to ClickHouse using the generic ODBC/JDBC ClickHouse driver. However, this connector streamlines the connection setup process. If you have any issues with the connector, feel free to reach out on GitHub. 
diff --git a/docs/en/integrations/data-visualization/tableau-online-and-clickhouse.md b/docs/en/integrations/data-visualization/tableau-online-and-clickhouse.md new file mode 100644 index 00000000000..d517555cb33 --- /dev/null +++ b/docs/en/integrations/data-visualization/tableau-online-and-clickhouse.md @@ -0,0 +1,51 @@ +--- +sidebar_label: Tableau Online +slug: /en/integrations/tableau-online +keywords: [clickhouse, tableau, online, mysql, connect, integrate, ui] +description: Tableau Online streamlines the power of data to make people faster and more confident decision makers from anywhere. +--- + +import MySQLCloudSetup from '@site/docs/en/_snippets/_clickhouse_mysql_cloud_setup.mdx'; +import MySQLOnPremiseSetup from '@site/docs/en/_snippets/_clickhouse_mysql_on_premise_setup.mdx'; + +# Tableau Online + +Tableau Online can connect to ClickHouse Cloud or on-premise ClickHouse setup via MySQL interface using the official MySQL data source. + +## ClickHouse Cloud Setup + + +## On-premise ClickHouse Server Setup + + +## Connecting Tableau Online to ClickHouse + +Login to your Tableau Cloud site and add a new Published Data Source. + +Creating a new published data source +
+ +Select "MySQL" from the list of available connectors. + +Selecting MySQL connector +
+ +Specify your connection details gathered during the ClickHouse setup. + +Specifying your connection details +
+ +Tableau Online will introspect the database and provide a list of available tables. Drag the desired table to the canvas on the right. Additionally, you can click "Update Now" to preview the data, as well as fine-tune the introspected field types or names. + +Selecting the tables to use +
+ +After that, all that remains is to click "Publish As" in the top right corner, and you should be able to use a newly created dataset in Tableau Online as usual. + +NB: if you want to use Tableau Online in combination with Tableau Desktop and share ClickHouse datasets between them, make sure you use Tableau Desktop with the default MySQL connector as well, following the setup guide that is displayed [here](https://www.tableau.com/support/drivers) if you select MySQL from the Data Source drop-down. If you have an M1 Mac, check [this troubleshooting thread](https://community.tableau.com/s/question/0D58b0000Ar6OhvCQE/unable-to-install-mysql-driver-for-m1-mac) for a driver installation workaround. + +## Known limitations +* Aggregation by week number does not work. Should be resolved after [#54794](https://github.com/ClickHouse/ClickHouse/issues/54794). +* Aggregations by truncated dates don't work. Should be resolved after [#54795](https://github.com/ClickHouse/ClickHouse/issues/54795). +* Aggregations over tables joined with "relationships" feature don't work. Should be resolved after [#55182](https://github.com/ClickHouse/ClickHouse/issues/55182). +* (Tableau Desktop via MySQL only) A table cannot be selected from the table browser during the connection setup. Use "Custom SQL" as a workaround. Should be resolved after [#55183](https://github.com/ClickHouse/ClickHouse/issues/55183). 
\ No newline at end of file diff --git a/docs/en/integrations/images/logos/hashboard.svg b/docs/en/integrations/images/logos/hashboard.svg new file mode 100644 index 00000000000..969b33d86fa --- /dev/null +++ b/docs/en/integrations/images/logos/hashboard.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/docs/en/integrations/index.mdx b/docs/en/integrations/index.mdx index e3df6404106..13dc2044847 100644 --- a/docs/en/integrations/index.mdx +++ b/docs/en/integrations/index.mdx @@ -18,6 +18,7 @@ import DbeaverSVG from "./images/logos/dbeaver_logo.svg"; import DbtSVG from "./images/logos/dbt.svg"; import JavaSVG from "./images/logos/java.svg"; import JdbcSVG from "./images/logos/jdbc.svg"; +import HashboardSVG from "./images/logos/hashboard.svg" import HiveSVG from "./images/logos/hive.svg"; import HdfsSVG from "./images/logos/hadoop.svg"; import KafkaSVG from "./images/logos/kafka.svg"; @@ -102,7 +103,7 @@ We are actively compiling this list of ClickHouse integrations below, so it's no |Redis||Data integration|Allows ClickHouse to use [Redis](https://redis.io/) as a dictionary source.|[Documentation](/docs/en/sql-reference/dictionaries/index.md#redis)| |SQLite||Data integration|Allows to import and export data to SQLite and supports queries to SQLite tables directly from ClickHouse.|[Documentation](/docs/en/engines/table-engines/integrations/sqlite)| |Superset||Data visualization|Explore and visualize your ClickHouse data with Apache Superset.|[Documentation](/docs/en/integrations/data-visualization/superset-and-clickhouse.md)| - +|Tableau Online|Tableau Online logo|Data visualization|Tableau Online streamlines the power of data to make people faster and more confident decision makers from anywhere|[Documentation](/docs/en/integrations/tableau-online)|
@@ -127,6 +128,7 @@ We are actively compiling this list of ClickHouse integrations below, so it's no |EMQX||Data ingestion|EMQX is an open source MQTT broker with a high-performance real-time message processing engine, powering event streaming for IoT devices at massive scale.|[Documentation](https://clickhouse.com/docs/en/integrations/mqtt)| |Explo|Explo logo|Data visualization|Explo is a customer-facing analytics tool for any platform|[Documentation](/docs/en/integrations/explo)| |Grafana||Data visualization|With Grafana you can create, explore and share all of your data through dashboards.|[Documentation](/docs/en/integrations/data-visualization/grafana-and-clickhouse.md)| +|Hashboard||Data visualization|[Hashboard](https://hashboard.com) is a business intelligence platform that enables self-service data exploration and metric tracking.|[Documentation](https://docs.hashboard.com/docs/database-connections/clickhouse)| |HEX|HEX logo|Data visualization|Hex is a modern, collaborative platform with notebooks, data apps, SQL, Python, no-code, R, and so much more.|[Documentation](https://learn.hex.tech/docs/connect-to-data/data-connections/overview)| |HighTouch|HighTouch logo|Data integration|Sync your data directly from your warehouse to 140+ destinations|[Website](https://hightouch.com/docs/sources/clickhouse)| |Holistics|Holistics logo|Data visualization|Business Intelligence for ClickHouse database|[Website](https://www.holistics.io/integrations/clickhouse/)| diff --git a/docs/en/integrations/language-clients/java/index.md b/docs/en/integrations/language-clients/java/index.md index 0d5383a38fb..c840fa081cc 100644 --- a/docs/en/integrations/language-clients/java/index.md +++ b/docs/en/integrations/language-clients/java/index.md @@ -25,7 +25,7 @@ Provides the most flexible and performant way to integrate your app with ClickHo | Client version | ClickHouse | |----------------|-------------| -| 0.4.6 | 20.7+ | +| 0.5.0 | 22.8+ | ### Installation @@ -34,7 +34,7 @@ 
Provides the most flexible and performant way to integrate your app with ClickHo com.clickhouse clickhouse-http-client - 0.4.6 + 0.5.0 ``` @@ -142,7 +142,7 @@ Consider [clickhouse-client](/docs/en/integrations/clickhouse-client-local.md) w | Client version | ClickHouse | |----------------|-------------| -| 0.4.6 | 20.7+ | +| 0.5.0 | 22.8+ | ### Installation @@ -150,7 +150,7 @@ Consider [clickhouse-client](/docs/en/integrations/clickhouse-client-local.md) w com.clickhouse clickhouse-jdbc - 0.4.6 + 0.5.0 all @@ -286,13 +286,16 @@ try (PreparedStatement ps = conn.prepareStatement("insert into mytable values(tr To establish a secure JDBC connection to ClickHouse using SSL, you'll need to configure your JDBC properties to include the SSL parameters. This typically involves specifying the SSL properties such as sslmode and sslrootcert in your JDBC URL/Properties object. #### SSL Properties -| Name | Default Value | Optional Values | Description | -|-------------------|--------------------|---------------------------|-----------------------------------------------| -| ssl | false | true, false | Whether to enable SSL/TLS for the connection. | -| sslmode | STRICT | verify, none | SSL mode. | -| sslrootcert | | | Path to SSL/TLS root certificates. | -| sslcert | | | Path to SSL/TLS certificate. | -| sslkey | | | RSA key in PKCS#8 format. 
| +| Name | Default Value | Optional Values | Description | +|---------------------|--------------------|-----------------|-------------------------------------------------------------------------------| +| ssl | false | true, false | Whether to enable SSL/TLS for the connection | +| sslmode | strict | strict, none | Whether to verify SSL/TLS certificate | +| sslrootcert | | | Path to SSL/TLS root certificates | +| sslcert | | | Path to SSL/TLS certificate | +| sslkey | | | RSA key in PKCS#8 format | +| key_store_type | | JKS, PKCS12 | Specifies the type or format of the keystore/truststore file | +| trust_store | | | Path to the truststore file | +| key_store_password | | | Password needed to access the keystore file specified in the keystore config | @@ -470,7 +473,7 @@ Alternatively, you can add equivalent parameters to the JDBC URL. | Client version | ClickHouse | |----------------|-------------| -| 0.4.6 | 20.7+ | +| 0.5.0 | 22.8+ | ### Installation @@ -479,7 +482,7 @@ Alternatively, you can add equivalent parameters to the JDBC URL. 
com.clickhouse clickhouse-r2dbc - 0.4.6 + 0.5.0 all diff --git a/docs/en/integrations/language-clients/python/index.md b/docs/en/integrations/language-clients/python/index.md index affa92c1136..ba156870315 100644 --- a/docs/en/integrations/language-clients/python/index.md +++ b/docs/en/integrations/language-clients/python/index.md @@ -611,7 +611,7 @@ can be provided as the `pool_mgr` keyword argument to the main `clickhouse_conne ```python import clickhouse_connect -import clickhouse_connect.httputil +from clickhouse_connect.driver import httputil big_pool_mgr = httputil.get_pool_manager(maxsize=16, num_pools=12) diff --git a/package.json b/package.json index 05fbdcdb362..57fb1089738 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,7 @@ "clear": "docusaurus clear && bash ./placeholderReset.sh", "deploy": "docusaurus deploy", "docusaurus": "docusaurus", - "prep-from-local": "sh -c 'array_root=($npm_package_config_prep_array_root);array_en=($npm_package_config_prep_array_en);for folder in ${array_en[@]}; do cp -r $0/$folder docs/en;echo \"Copied $folder from [$0]\";done;for folder in ${array_root[@]}; do cp -r $0/$folder docs/;echo \"Copied $folder from [$0]\";done;echo \"Prep completed\";'", + "prep-from-local": "bash -c 'array_root=($npm_package_config_prep_array_root);array_en=($npm_package_config_prep_array_en);for folder in ${array_en[@]}; do cp -r $0/$folder docs/en;echo \"Copied $folder from [$0]\";done;for folder in ${array_root[@]}; do cp -r $0/$folder docs/;echo \"Copied $folder from [$0]\";done;echo \"Prep completed\";'", "prep-from-master": "array_root=($npm_package_config_prep_array_root);array_en=($npm_package_config_prep_array_en);ch_temp=/tmp/ch_temp_$RANDOM && mkdir -p $ch_temp && git clone --depth 1 --branch master https://github.com/ClickHouse/ClickHouse $ch_temp; for folder in ${array_en[@]}; do cp -r $ch_temp/$folder docs/en;echo \"Copied $folder from ClickHouse master branch\";done;for folder in ${array_root[@]}; do cp -r 
$ch_temp/$folder docs/;echo \"Copied $folder from ClickHouse master branch\";done;rm -rf $ch_temp && echo \"Prep completed\";", "serve": "docusaurus serve", "build-api-doc": "node clickhouseapi.js", @@ -27,7 +27,7 @@ "@docusaurus/preset-classic": "2.3.1", "@docusaurus/theme-mermaid": "2.3.1", "@mdx-js/react": "^1.6.22", - "axios": "^1.5.0", + "axios": "^1.5.1", "@radix-ui/react-navigation-menu": "^1.1.2", "clsx": "^2.0.0", "docusaurus-plugin-sass": "^0.2.5", diff --git a/sidebars.js b/sidebars.js index 9ba32fde019..f32a3228600 100644 --- a/sidebars.js +++ b/sidebars.js @@ -91,15 +91,15 @@ const sidebars = { items: [ 'en/integrations/data-ingestion/s3/index', 'en/integrations/data-ingestion/gcs/index', + 'en/integrations/data-ingestion/kafka/index', 'en/integrations/data-ingestion/clickpipes/index', - 'en/integrations/data-ingestion/dbms/jdbc-with-clickhouse', - 'en/integrations/data-ingestion/dbms/odbc-with-clickhouse', 'en/integrations/data-ingestion/dbms/postgresql/index', 'en/integrations/data-ingestion/dbms/mysql/index', - 'en/integrations/data-ingestion/kafka/index', 'en/integrations/data-ingestion/etl-tools/dbt/index', 'en/integrations/data-ingestion/insert-local-files', 'en/integrations/data-ingestion/redshift/index', + 'en/integrations/data-ingestion/dbms/jdbc-with-clickhouse', + 'en/integrations/data-ingestion/dbms/odbc-with-clickhouse', { type: 'category', label: 'More...', @@ -108,7 +108,6 @@ const sidebars = { collapsible: true, items: [ 'en/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse', - 'en/integrations/data-ingestion/kafka/msk/index', 'en/integrations/data-ingestion/emqx/index', { type: 'link', @@ -242,6 +241,7 @@ const sidebars = { 'en/integrations/data-visualization/deepnote', 'en/integrations/data-visualization/explo-and-clickhouse', 'en/integrations/data-visualization/grafana-and-clickhouse', + 'en/integrations/data-visualization/hashboard-and-clickhouse', 'en/integrations/data-visualization/looker-and-clickhouse', 
'en/integrations/data-visualization/looker-studio-and-clickhouse', 'en/integrations/data-visualization/metabase-and-clickhouse', @@ -250,6 +250,7 @@ const sidebars = { 'en/integrations/data-visualization/rocketbi-and-clickhouse', 'en/integrations/data-visualization/superset-and-clickhouse', 'en/integrations/data-visualization/tableau-and-clickhouse', + 'en/integrations/data-visualization/tableau-online-and-clickhouse', 'en/integrations/data-visualization/zingdata-and-clickhouse', ], }, @@ -527,6 +528,7 @@ const sidebars = { 'en/cloud/security/gcp-private-service-connect', 'en/cloud/security/activity-log', 'en/cloud/security/secure-s3', + 'en/cloud/security/compliance-and-certification', ], }, { diff --git a/src/theme/Admonition/styles.module.css b/src/theme/Admonition/styles.module.css index 9b299ad4d6d..2933972fa9d 100644 --- a/src/theme/Admonition/styles.module.css +++ b/src/theme/Admonition/styles.module.css @@ -11,7 +11,7 @@ .admonitionHeading { font: var(--ifm-heading-font-weight) var(--ifm-h5-font-size) / var(--ifm-heading-line-height) var(--ifm-heading-font-family); - text-transform: uppercase; + margin-bottom: 4px; } .admonitionHeading code {