<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="description"
content="CoR-GS: Sparse-View 3D Gaussian Splatting via Co-Regularization.">
<meta name="keywords" content="Nerfies, D-NeRF, NeRF">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>CoR-GS: Sparse-View 3D Gaussian Splatting via Co-Regularization</title>

<!-- Global site tag (gtag.js) - Google Analytics -->
<!-- <script async src="https://www.googletagmanager.com/gtag/js?id=G-PYVRSFMDRL"></script> -->
<script>
window.dataLayer = window.dataLayer || [];

function gtag() {
dataLayer.push(arguments);
}

gtag('js', new Date());

gtag('config', 'G-PYVRSFMDRL');
</script>

<link href="https://fonts.googleapis.com/css?family=Google+Sans|Noto+Sans|Castoro"
rel="stylesheet">

<link rel="stylesheet" href="./static/css/bulma.min.css">
<link rel="stylesheet" href="./static/css/bulma-carousel.min.css">
<link rel="stylesheet" href="./static/css/bulma-slider.min.css">
<link rel="stylesheet" href="./static/css/fontawesome.all.min.css">
<link rel="stylesheet"
href="https://cdn.jsdelivr.net/gh/jpswalsh/academicons@1/css/academicons.min.css">
<link rel="stylesheet" href="./static/css/index.css">
<!-- <link rel="icon" href="./static/images/favicon.svg"> -->

<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
<script defer src="./static/js/fontawesome.all.min.js"></script>
<script src="./static/js/bulma-carousel.min.js"></script>
<script src="./static/js/bulma-slider.min.js"></script>
<script src="./static/js/index.js"></script>
</head>
<body>

<section class="hero">
<div class="hero-body">
<div class="container is-max-desktop">
<div class="columns is-centered">
<div class="column has-text-centered">
<h1 class="title is-2 publication-title">CoR-GS: Sparse-View 3D Gaussian Splatting via Co-Regularization</h1>
<div class="is-size-5 publication-authors">
<span class="author-block">
<a href="https://scholar.google.com/citations?user=WeRMtW4AAAAJ&hl=en" target="_blank">
Jiawei Zhang</a><sup>1</sup>,
</span>
<span class="author-block">
<a href="https://scholar.google.com/citations?hl=en&user=XG-o7LUAAAAJ" target="_blank">
Jiahe Li</a><sup>1</sup>,
</span>
<span class="author-block">
<a href="https://scholar.google.com/citations?user=3PURN9QAAAAJ&hl=zh-CN&authuser=1&oi=ao" target="_blank">
Xiaohan Yu</a><sup>3</sup>,
</span>
<span class="author-block">
<a href="https://scholar.google.com/citations?user=yTshbKkAAAAJ&hl=zh-CN&authuser=1" target="_blank">
Huang Lei</a><sup>2</sup>,
</span>
<span class="author-block">
<a href="https://scholar.google.com/citations?user=gIEZe5IAAAAJ&hl=en" target="_blank">
Lin Gu</a><sup>4,5</sup>,
</span>
<span class="author-block">
<a>
Jin Zheng</a><sup>1</sup>,
</span>
<span class="author-block">
<a href="https://scholar.google.com/citations?user=k6l1vZIAAAAJ&hl=en" target="_blank">
Xiao Bai</a><sup>1,*</sup>
</span>
</div>

<div class="is-size-5 publication-authors">
<span class="author-block"><sup>1</sup>Beihang University,</span>
<span class="author-block"><sup>2</sup>SKLCCSE, Institute of Artificial Intelligence, Beihang University,</span>
<span class="author-block"><sup>3</sup>School of Computing, Macquarie University,</span>
</div>
<div class="is-size-5 publication-authors">
<span class="author-block"><sup>4</sup>RIKEN AIP,</span>
<span class="author-block"><sup>5</sup>The University of Tokyo</span>
</div>
<!--
<div class="is-size-5">
<span style="color: rgb(165, 165, 165);">CVPR 2024</span>
</div> -->

<div class="column has-text-centered">
<div class="publication-links">
<!-- PDF Link. -->
<span class="link-block">
<a href="https://jiaw-z.github.io/CoR-GS/" target="_blank"
class="external-link button is-normal is-rounded is-light">
<span class="icon">
<i class="fas fa-file-pdf"></i>
</span>
<span>Paper</span>
</a>
</span>
<span class="link-block">
<a href="https://jiaw-z.github.io/CoR-GS/" target="_blank"
class="external-link button is-normal is-rounded is-light">
<span class="icon">
<i class="ai ai-arxiv"></i>
</span>
<span>arXiv</span>
</a>
</span>
<!-- Video Link. -->
<span class="link-block">
<a href="https://jiaw-z.github.io/CoR-GS/" target="_blank"
class="external-link button is-normal is-rounded is-light">
<span class="icon">
<i class="fab fa-youtube"></i>
</span>
<span>Video</span>
</a>
</span>
<!-- Code Link. -->
<span class="link-block">
<a href="https://github.com/jiaw-z/CoR-GS" target="_blank"
class="external-link button is-normal is-rounded is-light">
<span class="icon">
<i class="fab fa-github"></i>
</span>
<span>Code (TBD)</span>
</a>
</span>
<!-- Dataset Link. -->
<!-- <span class="link-block">
<a href="https://github.com/google/nerfies/releases/tag/0.1"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="far fa-images"></i>
</span>
<span>Data</span>
</a> -->
</div>

</div>
</div>
</div>
</div>
</div>
</section>

<section class="hero teaser">
<div class="container is-max-desktop">
<div class="hero-body">
<!-- <video id="teaser" autoplay muted loop playsinline height="100%">
<source src="./static/videos/teaser.mp4"
type="video/mp4">
</video> -->
<img src="./static/images/intro_overview.png"/>
<h2 class="subtitle has-text-centered">
Illustration of how the differing behaviors of two 3D Gaussian radiance fields correlate with reconstruction quality. Gaussians that behave differently tend not to fit the ground-truth shape well; inaccurate reconstructions can therefore be identified by measuring these differences without access to ground-truth information.
</h2>
</div>
</div>
</section>


<section class="section">
<div class="container is-max-desktop">
<!-- Abstract. -->
<div class="columns is-centered has-text-centered">
<div class="column is-four-fifths">
<h2 class="title is-3">Abstract</h2>
<div class="content has-text-justified">
<p>
3D Gaussian Splatting (3DGS) creates a radiance field consisting of 3D Gaussians to represent a scene. With sparse training views, 3DGS easily suffers from overfitting, which negatively impacts reconstruction quality.</p>
<p>
This paper introduces a new co-regularization perspective for improving sparse-view 3DGS. When training two 3D Gaussian radiance fields on the same sparse views of a scene, we observe that the two radiance fields exhibit <i>point disagreement</i> and <i>rendering disagreement</i>, stemming from the sampling implementation in densification, and that these disagreements can predict reconstruction quality without supervision. We further quantify point disagreement by evaluating the registration between the Gaussians' point representations, and rendering disagreement by calculating the differences in their rendered pixels. The empirical study demonstrates a negative correlation between the two disagreements and accurate reconstruction, which allows us to identify inaccurate reconstruction without accessing ground-truth information.
</p>
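<p>
To make the two measures concrete, a minimal sketch of how such disagreements could be computed is shown below. This is an illustrative example rather than the paper's implementation; the Gaussian center sets and rendered images are assumed inputs.
</p>
<pre><code>import torch

def point_disagreement(centers_a, centers_b):
    # Symmetric nearest-neighbour distance between the two sets of Gaussian
    # centers ((N, 3) and (M, 3) tensors); a simple stand-in for the
    # registration-based measure described above.
    d = torch.cdist(centers_a, centers_b)   # (N, M) pairwise distances
    return 0.5 * (d.min(dim=1).values.mean() + d.min(dim=0).values.mean())

def rendering_disagreement(img_a, img_b):
    # Mean per-pixel difference between the two fields' renderings of the
    # same view ((H, W, 3) tensors).
    return torch.abs(img_a - img_b).mean()
</code></pre>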
<p>
Based on this study, we propose CoR-GS, which identifies and suppresses inaccurate reconstruction using the two disagreements: (i) co-pruning treats Gaussians that exhibit high point disagreement as occupying inaccurate positions and prunes them; (ii) pseudo-view co-regularization treats pixels that exhibit high rendering disagreement as inaccurately rendered and suppresses that disagreement.
</p>
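<p>
The sketch below illustrates these two operations under assumed inputs (per-Gaussian match distances to the other field, and renderings from a pseudo view). It is a hypothetical example, not the released CoR-GS code.
</p>
<pre><code>import torch

def co_prune(gaussians, match_distance, tau):
    # Co-pruning: keep only Gaussians whose nearest counterpart in the other
    # field lies within distance tau (i.e. low point disagreement).
    # `gaussians` is a dict of per-Gaussian parameter tensors.
    keep = match_distance.le(tau)
    return {name: param[keep] for name, param in gaussians.items()}

def pseudo_view_coreg_loss(render_a, render_b):
    # Pseudo-view co-regularization: penalize rendering disagreement between
    # the two fields on an unobserved (pseudo) view.
    return torch.abs(render_a - render_b).mean()
</code></pre>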
<p>
Results on LLFF, Mip-NeRF360, DTU, and Blender demonstrate that CoR-GS effectively regularizes the scene geometry, reconstructs compact representations, and achieves state-of-the-art novel view synthesis quality under sparse training views.
</p>
</div>
</div>
</div>
<!--/ Abstract. -->


<!-- Paper video.
<div class="columns is-centered has-text-centered">
<div class="column is-four-fifths">
<h2 class="title is-3">Video</h2>
<div class="publication-video">
<iframe src="https://www.youtube.com/embed/WKXCFNJHZ4o?rel=0&showinfo=0"
frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>
</div>
</div>
</div> -->
<!-- / Paper video. -->
</div>
</section>


<section class="section">
<div class="container is-max-desktop">

<!-- Method. -->
<div class="columns is-centered has-text-centered">
<div class="column is-full-width">

<h2 class="title is-3">Method</h2>
<div class="content has-text-centered">
<img src="./static/images/method.png">
</div>
<div class="content has-text-justified"> | ||
<p> | ||
CoR-GS trains two 3D Gaussian radiance fields with the same views and conducts co-regularization during training. It improves sparse-view 3DGS by identifying and suppressing inaccurate reconstruction based on the point disagreement and rendering disagreement. | ||
</p> | ||
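<p>
A rough outline of this two-field training scheme is sketched below. Every helper (renderer, losses, pseudo-view sampler, pruning routine) is passed in as a placeholder, so this is an assumed structure for illustration rather than the released implementation.
</p>
<pre><code>def train_cor_gs(field_a, field_b, train_views, render, photometric_loss,
                 sample_pseudo_view, coreg_loss, co_prune_both,
                 optimizers, num_steps, lambda_coreg, prune_interval, tau):
    # Sketch: both fields see the same sparse views; a photometric loss is
    # applied to each, a co-regularization term ties their pseudo-view
    # renderings together, and co-pruning runs periodically.
    for step in range(num_steps):
        view = train_views[step % len(train_views)]
        loss = (photometric_loss(render(field_a, view), view.image) +
                photometric_loss(render(field_b, view), view.image))
        pseudo = sample_pseudo_view(train_views)   # unobserved, interpolated pose
        loss = loss + lambda_coreg * coreg_loss(render(field_a, pseudo),
                                                render(field_b, pseudo))
        loss.backward()
        for opt in optimizers:
            opt.step()
            opt.zero_grad()
        if (step + 1) % prune_interval == 0:
            co_prune_both(field_a, field_b, tau)   # drop high-disagreement Gaussians
</code></pre>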
</div>
</div>
</div>

<!-- Comparison. -->
<div class="columns is-centered has-text-centered">
<div class="column is-full-width">

<h3 class="title is-3">Comparison</h3>
<div class="content has-text-justified">
<p>
Comparison with current state-of-the-art baselines. Zoom in for better visualization.
</p>
</div>
<h5 class="title is-5">LLFF</h5>
<div class="content has-text-centered">
<img src="./static/images/llff.png">
</div>
</div>
</div>


</section>


<!-- <section class="section" id="BibTeX">
<div class="container is-max-desktop content">
<h2 class="title">BibTeX</h2>
<pre><code>@article{li2024dngaussian,
title={DNGaussian: Optimizing Sparse-View 3D Gaussian Radiance Fields with Global-Local Depth Normalization},
author={Jiahe Li and Jiawei Zhang and Xiao Bai and Jin Zheng and Xin Ning and Jun Zhou and Lin Gu},
journal={arXiv preprint arXiv:2403.06912},
year={2024}
}</code></pre>
</div>
</section> -->


<footer class="footer">
<div class="container">
<div class="content has-text-centered">
<a class="icon-link"
href="https://jiaw-z.github.io/CoR-GS/" target="_blank">
<i class="fas fa-file-pdf"></i>
</a>
<a class="icon-link" href="https://github.com/jiaw-z/CoR-GS" target="_blank" class="external-link" disabled> | ||
<i class="fab fa-github"></i> | ||
</a> | ||
</div> | ||
<div class="columns is-centered"> | ||
<div class="column is-8"> | ||
<div class="content"> | ||
<p>
This website is built on the <a
href="https://github.com/nerfies/nerfies.github.io">Nerfies</a> template.
</p>
</div>
</div>
</div>
</div>
</footer>

</body>
</html>