From 9c4d2582f1056ad04c2e64046d0747ffffcf8b75 Mon Sep 17 00:00:00 2001
From: Alexis VIALARET <alexis.vialaret@artefact.com>
Date: Thu, 2 Nov 2023 17:06:48 +0100
Subject: [PATCH] upd: contributing.md, cleanup

---
 CONTRIBUTING.md                               | 66 +++++++++++-----
 dbt_remote/{src/dbt_remote => }/cli.py        | 13 ++--
 dbt_remote/src/{dbt_remote => }/cli_input.py  |  4 +-
 .../src/{dbt_remote => }/cli_local_config.py  |  0
 dbt_remote/src/{dbt_remote => }/cli_params.py |  1 +
 dbt_remote/src/dbt_remote/__init__.py         |  0
 dbt_remote/src/{dbt_remote => }/dbt_server.py |  2 +-
 .../{dbt_remote => }/dbt_server_detector.py   |  0
 .../src/{dbt_remote => }/dbt_server_image.py  |  4 +-
 dbt_server/dbt_server.py                      |  4 +-
 dbt_server/tests/conftest.py                  | 49 ------------
 .../integration/dbt_project/dbt_project.yml   | 22 ------
 .../models/example/my_first_dbt_model.sql     | 27 -------
 .../models/example/my_second_dbt_model.sql    |  6 --
 .../dbt_project/models/example/schema.yml     | 21 ------
 .../dbt_project/seeds/test_seed.csv           |  2 -
 .../tests/integration/test_dbt_server.py      | 63 ----------------
 dbt_server/tests/unit/test_placholder.py      |  4 -
 poetry.lock                                   | 75 +++++++++++--------
 pyproject.toml                                |  7 +-
 tests/test_end_to_end.py                      | 33 ++++----
 21 files changed, 124 insertions(+), 279 deletions(-)
 rename dbt_remote/{src/dbt_remote => }/cli.py (91%)
 rename dbt_remote/src/{dbt_remote => }/cli_input.py (97%)
 rename dbt_remote/src/{dbt_remote => }/cli_local_config.py (100%)
 rename dbt_remote/src/{dbt_remote => }/cli_params.py (98%)
 delete mode 100644 dbt_remote/src/dbt_remote/__init__.py
 rename dbt_remote/src/{dbt_remote => }/dbt_server.py (99%)
 rename dbt_remote/src/{dbt_remote => }/dbt_server_detector.py (100%)
 rename dbt_remote/src/{dbt_remote => }/dbt_server_image.py (86%)
 delete mode 100644 dbt_server/tests/conftest.py
 delete mode 100644 dbt_server/tests/integration/dbt_project/dbt_project.yml
 delete mode 100644 dbt_server/tests/integration/dbt_project/models/example/my_first_dbt_model.sql
 delete mode 100644 dbt_server/tests/integration/dbt_project/models/example/my_second_dbt_model.sql
 delete mode 100644 dbt_server/tests/integration/dbt_project/models/example/schema.yml
 delete mode 100644 dbt_server/tests/integration/dbt_project/seeds/test_seed.csv
 delete mode 100644 dbt_server/tests/integration/test_dbt_server.py
 delete mode 100644 dbt_server/tests/unit/test_placholder.py

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8e8a10f7..c4007b33 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,6 +1,6 @@
 # Contributing to dbt-remote
 
-First off, thanks for taking the time to contribute!
+Thanks for taking the time to contribute!
 
 ## Setup
 
@@ -25,34 +25,62 @@ export LOCATION=<LOCATION>
 Follow the dbt-server deployment instructions here: [dbt-server deployment guide](../dbt_server/README.md)
 
 ### Run end-to-end tests to make sure everyting is properly setup
-Go to the testing directory that contains a dbt project.
+This should take a few minutes.
 ```shell
-cd tests/dbt_project
+poetry run pytest tests -o log_cli=true --log-cli-level=info -vvv --maxfail=1
 ```
 
-From there, run the tests. This should take a few minutes.
+This makes sure that you are able to properly push images on the remote Docker registry, and that dbt commands run as expected.
+
+Once everything here is green, you are good to go.
+## **Development workflow**
+
+### Running the server locally
+This is useful to reduce turnaround time during development as you will not necessarily have to build a new image and deploy a new Cloud Run instance.
+
+**You still need to have deployed the dbt-server once on GCP as there are resources that are needed even for local serving**
+
+Make sure the necessary env vars are available. If you deployed the dbt-server resources using the default names:
 ```shell
-poetry run pytest .. -log_cli=true -log_cli_level=info -vvv --maxfail=1
+export LOCAL=true
+export SERVICE_ACCOUNT=dbt-server-service-account@${PROJECT_ID}.iam.gserviceaccount.com
+export BUCKET_NAME=${PROJECT_ID}-dbt-server
+export DOCKER_IMAGE=${LOCATION}-docker.pkg.dev/${PROJECT_ID}/dbt-server-repository/server-image
+export ARTIFACT_REGISTRY=${LOCATION}-docker.pkg.dev/${PROJECT_ID}/dbt-server-repository
 ```
 
-Once everything here is green, your are good to go.
-## **Workflow for developing on the dbt-server**
+Start the dbt-server locally:
+```shell
+poetry run python3 dbt_server/dbt_server.py
+```
+```shell
+INFO:     Uvicorn running on http://0.0.0.0:8001 (Press CTRL+C to quit)
+INFO:     Started reloader process [64101] using StatReload
+INFO:     Started server process [64110]
+INFO:     Waiting for application startup.
+INFO:     Application startup complete.
+```
 
-To run your dbt-server in local, **you must first create all the required resources on GCP** (see the README's section 'dbt-server'). Then export your environment variables using your GCP project and newly-created resources:
-```sh
-export BUCKET_NAME=<bucket-name>
-export DOCKER_IMAGE=<docker-image>
-export SERVICE_ACCOUNT=<service-account-email>
-export PROJECT_ID=<project-id>
-export LOCATION=<location>
+**While the dbt-server code is executed locally, the actual dbt execution still happens in a cloud run job based on the docker image in your GCP project. Make sure to push a new image if you make any changes that affect it during development.**
+```shell
+poetry run python3 dbt_remote/cli.py image submit
 ```
-Finally you can run your server:
-```sh
-cd dbt_server
-poetry run python3 dbt_server.py
+
+You should now be able to call it:
+```shell
+poetry run python3 dbt_remote/cli.py debug --project-dir tests/dbt_project --server-url http://localhost:8001/
+```
+```shell
+[...]
+INFO    [dbt] Registered adapter: bigquery=1.6.8
+INFO    [dbt]   Connection test: [OK connection ok]  
+INFO    [dbt] All checks passed!
+INFO    [job] Command successfully executed
+INFO    [job] dbt-remote job finished
 ```
-> Note: This server is a Fastapi server running on 8001, you can change this set up at the end of the ```dbt_server.py``` file.
 
+## Publishing a new package version
+----------------
 ## **Workflow for working on the dbt-remote**
 
 To build and install your own version of the package, you can run (at the root of the project):
diff --git a/dbt_remote/src/dbt_remote/cli.py b/dbt_remote/cli.py
similarity index 91%
rename from dbt_remote/src/dbt_remote/cli.py
rename to dbt_remote/cli.py
index f24f2c8c..27911335 100644
--- a/dbt_remote/src/dbt_remote/cli.py
+++ b/dbt_remote/cli.py
@@ -3,14 +3,13 @@
 
 from dbt.cli import main as dbt_cli
 from dbt.cli.main import global_flags
-from  dbt.cli import params as dbt_p
-
-from src.dbt_remote.cli_local_config import LocalCliConfig
-from src.dbt_remote.dbt_server_image import DbtServerImage
-from src.dbt_remote.dbt_server import DbtServer, DbtServerCommand
-from src.dbt_remote import cli_params as p
-from src.dbt_remote.cli_input import CliInput
+from dbt.cli import params as dbt_p
 
+from src.cli_local_config import LocalCliConfig
+from src.dbt_server_image import DbtServerImage
+from src.dbt_server import DbtServer, DbtServerCommand
+from src import cli_params as p
+from src.cli_input import CliInput
 
 
 help_msg = """
diff --git a/dbt_remote/src/dbt_remote/cli_input.py b/dbt_remote/src/cli_input.py
similarity index 97%
rename from dbt_remote/src/dbt_remote/cli_input.py
rename to dbt_remote/src/cli_input.py
index d41e63df..e49fe0aa 100644
--- a/dbt_remote/src/dbt_remote/cli_input.py
+++ b/dbt_remote/src/cli_input.py
@@ -9,8 +9,8 @@
 from dbt.contracts.graph.manifest import Manifest
 from dbt.parser.manifest import write_manifest
 
-from src.dbt_remote.cli_local_config import LocalCliConfig
-from src.dbt_remote.dbt_server_detector import detect_dbt_server_uri
+from src.cli_local_config import LocalCliConfig
+from src.dbt_server_detector import detect_dbt_server_uri
 
 
 @dataclass
diff --git a/dbt_remote/src/dbt_remote/cli_local_config.py b/dbt_remote/src/cli_local_config.py
similarity index 100%
rename from dbt_remote/src/dbt_remote/cli_local_config.py
rename to dbt_remote/src/cli_local_config.py
diff --git a/dbt_remote/src/dbt_remote/cli_params.py b/dbt_remote/src/cli_params.py
similarity index 98%
rename from dbt_remote/src/dbt_remote/cli_params.py
rename to dbt_remote/src/cli_params.py
index 86965d16..a9458b8b 100644
--- a/dbt_remote/src/dbt_remote/cli_params.py
+++ b/dbt_remote/src/cli_params.py
@@ -8,6 +8,7 @@
 
 project_dir = click.option(
     '--project-dir',
+    envvar='PROJECT_DIR',
     help='Which directory to look in for the dbt_project.yml file. Default is the current directory.'
 )
 
diff --git a/dbt_remote/src/dbt_remote/__init__.py b/dbt_remote/src/dbt_remote/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/dbt_remote/src/dbt_remote/dbt_server.py b/dbt_remote/src/dbt_server.py
similarity index 99%
rename from dbt_remote/src/dbt_remote/dbt_server.py
rename to dbt_remote/src/dbt_server.py
index 8eae34e3..8b916fd3 100644
--- a/dbt_remote/src/dbt_remote/dbt_server.py
+++ b/dbt_remote/src/dbt_server.py
@@ -12,7 +12,7 @@
 from pydantic import BaseModel
 from termcolor import colored
 
-from src.dbt_remote.cli_input import CliInput
+from src.cli_input import CliInput
 
 
 @dataclass
diff --git a/dbt_remote/src/dbt_remote/dbt_server_detector.py b/dbt_remote/src/dbt_server_detector.py
similarity index 100%
rename from dbt_remote/src/dbt_remote/dbt_server_detector.py
rename to dbt_remote/src/dbt_server_detector.py
diff --git a/dbt_remote/src/dbt_remote/dbt_server_image.py b/dbt_remote/src/dbt_server_image.py
similarity index 86%
rename from dbt_remote/src/dbt_remote/dbt_server_image.py
rename to dbt_remote/src/dbt_server_image.py
index c64cbfcd..fbbc6866 100644
--- a/dbt_remote/src/dbt_remote/dbt_server_image.py
+++ b/dbt_remote/src/dbt_server_image.py
@@ -1,4 +1,3 @@
-import json
 import subprocess
 from pathlib import Path
 
@@ -9,8 +8,7 @@ def __init__(self, location: str, artifact_registry: str):
         self.artifact_registry = artifact_registry
 
     def submit(self):
-        site_packages_path = Path(__file__).parents[3]  # /Users/.../dbt_remote
-        dbt_server_dir = site_packages_path / "dbt_server"
+        dbt_server_dir = Path(__file__).parents[2] / "dbt_server"
 
         command = f"gcloud builds submit {dbt_server_dir} --region={self.location} --tag {self.artifact_registry}/server-image"
         process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
diff --git a/dbt_server/dbt_server.py b/dbt_server/dbt_server.py
index c3e9cc02..8011084c 100644
--- a/dbt_server/dbt_server.py
+++ b/dbt_server/dbt_server.py
@@ -1,10 +1,8 @@
 import os
-import time
 import traceback
-from typing import List, Optional
 
 import uvicorn
-from fastapi import Depends, FastAPI, File, Form, HTTPException, UploadFile, status
+from fastapi import Depends, FastAPI, HTTPException, status
 
 from lib.dbt_cloud_run_job import DbtCloudRunJobStarter, DbtCloudRunJobConfig, DbtCloudRunJobCreationFailed, DbtCloudRunJobStartFailed
 from lib.dbt_command import DbtCommand
diff --git a/dbt_server/tests/conftest.py b/dbt_server/tests/conftest.py
deleted file mode 100644
index f600af28..00000000
--- a/dbt_server/tests/conftest.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from pytest import fixture
-from unittest.mock import Mock
-
-
-@fixture
-def MockCloudStorage():
-    mock_blob_size = Mock(return_value=10)
-    mock_blob = Mock(name="mock_blob")
-    mock_blob.upload_from_string.return_value = None
-    mock_blob.download_as_bytes.return_value = b'hello world'
-    mock_blob.name = 'test'
-    mock_blob.len.return_value = 10
-    mock_blob.size = mock_blob_size()
-
-    mock_bucket = Mock(name="mock_bucket")
-    mock_bucket.blob.return_value = mock_blob
-    mock_bucket.get_blob.return_value = mock_blob
-
-    mock_gcs_client = Mock(name="mock_gcs_client")
-    mock_gcs_client.get_bucket.return_value = mock_bucket
-    mock_gcs_client.bucket.return_value = mock_bucket
-    mock_gcs_client.list_blobs.return_value = [mock_blob]
-
-    return mock_gcs_client, mock_bucket, mock_blob, mock_blob_size
-
-
-@fixture
-def MockState():
-    mock_get = Mock(name='mock_get')
-    mock_get.to_dict.return_value = {"log_starting_byte": 0}
-
-    mock_document = Mock(name="mock_document")
-    mock_document.set.return_value = None
-    mock_document.get.return_value = mock_get
-
-    mock_dbt_collection = Mock(name="mock_dbt_collection")
-    mock_dbt_collection.document.return_value = mock_document
-
-    return mock_dbt_collection, mock_document
-
-
-@fixture
-def MockLogging():
-    mock_logger = Mock(name="mock_logger")
-
-    mock_logging = Mock(name="mock_logging")
-    mock_logging.logger.return_value = mock_logger
-
-    return mock_logging
diff --git a/dbt_server/tests/integration/dbt_project/dbt_project.yml b/dbt_server/tests/integration/dbt_project/dbt_project.yml
deleted file mode 100644
index 7b566526..00000000
--- a/dbt_server/tests/integration/dbt_project/dbt_project.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-
-name: 'dbt_server_integration_test'
-version: '1.0.0'
-config-version: 2
-
-profile: 'dbt_server_integration_test'
-
-model-paths: ["models"]
-analysis-paths: ["analyses"]
-test-paths: ["tests"]
-seed-paths: ["seeds"]
-macro-paths: ["macros"]
-snapshot-paths: ["snapshots"]
-
-clean-targets:
-  - "target"
-  - "dbt_packages"
-
-models:
-  alexis:
-    example:
-      +materialized: view
diff --git a/dbt_server/tests/integration/dbt_project/models/example/my_first_dbt_model.sql b/dbt_server/tests/integration/dbt_project/models/example/my_first_dbt_model.sql
deleted file mode 100644
index 764ed8df..00000000
--- a/dbt_server/tests/integration/dbt_project/models/example/my_first_dbt_model.sql
+++ /dev/null
@@ -1,27 +0,0 @@
-
-/*
-    Welcome to your first dbt model!
-    Did you know that you can also configure models directly within SQL files?
-    This will override configurations stated in dbt_project.yml
-
-    Try changing "table" to "view" below
-*/
-
-{{ config(materialized='table') }}
-
-with source_data as (
-
-    select 1
-    union all
-    select null as id
-
-)
-
-select *
-from source_data
-
-/*
-    Uncomment the line below to remove records with null `id` values
-*/
-
--- where id is not null
diff --git a/dbt_server/tests/integration/dbt_project/models/example/my_second_dbt_model.sql b/dbt_server/tests/integration/dbt_project/models/example/my_second_dbt_model.sql
deleted file mode 100644
index c91f8793..00000000
--- a/dbt_server/tests/integration/dbt_project/models/example/my_second_dbt_model.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-
--- Use the `ref` function to select from other models
-
-select *
-from {{ ref('my_first_dbt_model') }}
-where id = 1
diff --git a/dbt_server/tests/integration/dbt_project/models/example/schema.yml b/dbt_server/tests/integration/dbt_project/models/example/schema.yml
deleted file mode 100644
index 2a530817..00000000
--- a/dbt_server/tests/integration/dbt_project/models/example/schema.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-
-version: 2
-
-models:
-  - name: my_first_dbt_model
-    description: "A starter dbt model"
-    columns:
-      - name: id
-        description: "The primary key for this table"
-        tests:
-          - unique
-          - not_null
-
-  - name: my_second_dbt_model
-    description: "A starter dbt model"
-    columns:
-      - name: id
-        description: "The primary key for this table"
-        tests:
-          - unique
-          - not_null
diff --git a/dbt_server/tests/integration/dbt_project/seeds/test_seed.csv b/dbt_server/tests/integration/dbt_project/seeds/test_seed.csv
deleted file mode 100644
index cfa20f81..00000000
--- a/dbt_server/tests/integration/dbt_project/seeds/test_seed.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-a,b
-1,2
diff --git a/dbt_server/tests/integration/test_dbt_server.py b/dbt_server/tests/integration/test_dbt_server.py
deleted file mode 100644
index f73018cf..00000000
--- a/dbt_server/tests/integration/test_dbt_server.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import base64
-from pathlib import Path
-from subprocess import check_output
-from time import sleep
-from dbt_server.dbt_server import app
-from fastapi.testclient import TestClient
-
-client = TestClient(app)
-
-def test_check_endpoint():
-    response = client.get("/check")
-    assert response.status_code == 200
-    assert "response" in response.json()
-    assert "dbt-server" in response.json()["response"]
-
-def test_dbt_endpoint():
-    profiles_path = Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt"
-    dbt_project_path = Path(__file__).parent / 'dbt_project'
-    check_output(f"cd {dbt_project_path} && dbt compile", shell=True)
-
-    manifest = read_file_as_b64(dbt_project_path / "target" / "manifest.json")
-    dbt_project_str = read_file_as_b64(dbt_project_path / "dbt_project.yml")
-    profiles_str = read_file_as_b64(profiles_path / "profiles.yml")
-    seeds_str = read_file_as_b64(dbt_project_path / "seeds" / "test_seed.csv")
-
-    dbt_command = {
-        "server_url": "http://does_not_matter:8000/",
-        "user_command": "run",
-        "manifest": manifest,
-        "dbt_project": dbt_project_str,
-        "profiles": profiles_str,
-        "seeds": {"seed": seeds_str},
-    }
-    response = client.post("/dbt", json=dbt_command)
-
-    assert response.status_code == 202
-    assert "uuid" in response.json()
-    assert "links" in response.json()
-
-    uuid = response.json()['uuid']
-
-    status = client.get(f"/job/{uuid}").json()['run_status']
-    while status in ["pending", "running"]:
-        response = client.get(f"/job/{uuid}/last_logs")
-        assert response.status_code == 200
-        assert "run_logs" in response.json()
-        assert isinstance(response.json()['run_logs'], list)
-        status = client.get(f"/job/{uuid}").json()['run_status']
-        sleep(3)
-
-
-    response = client.get(f"/job/{uuid}/logs")
-    assert response.status_code == 200
-    assert "run_logs" in response.json()
-    assert isinstance(response.json()['run_logs'], list)
-    assert "Command successfully executed" in " ".join(response.json()['run_logs'])
-
-def read_file_as_b64(filename) -> str:
-    with open(filename, 'r') as f:
-        file_str = f.read()
-    file_bytes = base64.b64encode(bytes(file_str, 'ascii'))
-    file_str = file_bytes.decode('ascii')
-    return file_str
diff --git a/dbt_server/tests/unit/test_placholder.py b/dbt_server/tests/unit/test_placholder.py
deleted file mode 100644
index a5a060d7..00000000
--- a/dbt_server/tests/unit/test_placholder.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ensures CI is running ok
-
-def test_ci():
-    assert True
diff --git a/poetry.lock b/poetry.lock
index 47c6670d..d3a274d8 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
 
 [[package]]
 name = "agate"
@@ -25,24 +25,24 @@ test = ["PyICU (>=2.4.2)", "coverage (>=3.7.1)", "cssselect (>=0.9.1)", "lxml (>
 
 [[package]]
 name = "anyio"
-version = "3.7.1"
+version = "4.0.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
-    {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
+    {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"},
+    {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"},
 ]
 
 [package.dependencies]
-exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
 idna = ">=2.8"
 sniffio = ">=1.1"
 
 [package.extras]
-doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"]
-test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (<0.22)"]
+doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
+trio = ["trio (>=0.22)"]
 
 [[package]]
 name = "attrs"
@@ -420,20 +420,19 @@ test = ["pytest (>=6)"]
 
 [[package]]
 name = "fastapi"
-version = "0.104.1"
+version = "0.103.0"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "fastapi-0.104.1-py3-none-any.whl", hash = "sha256:752dc31160cdbd0436bb93bad51560b57e525cbb1d4bbf6f4904ceee75548241"},
-    {file = "fastapi-0.104.1.tar.gz", hash = "sha256:e5e4540a7c5e1dcfbbcf5b903c234feddcdcd881f191977a1c5dfd917487e7ae"},
+    {file = "fastapi-0.103.0-py3-none-any.whl", hash = "sha256:61ab72c6c281205dd0cbaccf503e829a37e0be108d965ac223779a8479243665"},
+    {file = "fastapi-0.103.0.tar.gz", hash = "sha256:4166732f5ddf61c33e9fa4664f73780872511e0598d4d5434b1816dc1e6d9421"},
 ]
 
 [package.dependencies]
-anyio = ">=3.7.1,<4.0.0"
 pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
 starlette = ">=0.27.0,<0.28.0"
-typing-extensions = ">=4.8.0"
+typing-extensions = ">=4.5.0"
 
 [package.extras]
 all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
@@ -453,12 +452,12 @@ files = [
 google-auth = ">=2.14.1,<3.0.dev0"
 googleapis-common-protos = ">=1.56.2,<2.0.dev0"
 grpcio = [
+    {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""},
     {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
 ]
 grpcio-status = [
+    {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""},
     {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-    {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
 requests = ">=2.18.0,<3.0.0.dev0"
@@ -505,8 +504,8 @@ files = [
 [package.dependencies]
 google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -541,8 +540,8 @@ google-api-core = {version = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev", extras =
 google-cloud-core = ">=1.6.0,<3.0.0dev"
 google-resumable-media = ">=0.6.0,<3.0dev"
 grpcio = [
-    {version = ">=1.49.1,<2.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.47.0,<2.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.49.1,<2.0dev", markers = "python_version >= \"3.11\""},
 ]
 packaging = ">=20.0.0"
 proto-plus = ">=1.15.0,<2.0.0dev"
@@ -562,21 +561,21 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"]
 
 [[package]]
 name = "google-cloud-build"
-version = "3.20.1"
+version = "3.21.0"
 description = "Google Cloud Build API client library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "google-cloud-build-3.20.1.tar.gz", hash = "sha256:4588bfe36638c54dfb752f6cf6a0b216acd2f98c1dacde9e5e820e0c1b1eba1b"},
-    {file = "google_cloud_build-3.20.1-py2.py3-none-any.whl", hash = "sha256:b11112cea6b700902d2a52deee27fba172d9244dbc2ba5d51bbfcd146b5f28f4"},
+    {file = "google-cloud-build-3.21.0.tar.gz", hash = "sha256:97a41d6f54ac58a77f0e54050c4094de5f418857d3e30ed51a371bbca75d016f"},
+    {file = "google_cloud_build-3.21.0-py2.py3-none-any.whl", hash = "sha256:b22d07a64dc7be07e511c5f6fb35ec61c35965e9525210a1f0b4a4de7dbdf153"},
 ]
 
 [package.dependencies]
 google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
 grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -600,21 +599,21 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"]
 
 [[package]]
 name = "google-cloud-dataproc"
-version = "5.6.0"
+version = "5.7.0"
 description = "Google Cloud Dataproc API client library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "google-cloud-dataproc-5.6.0.tar.gz", hash = "sha256:5fa9dfb88433c2d0c138b7ebb6ade174103be3762ccd039b24d53c7448e3760d"},
-    {file = "google_cloud_dataproc-5.6.0-py2.py3-none-any.whl", hash = "sha256:d76a7791d5b81b498cf353be4f14a2194a696fdd1a41a69b33ac3319c5a8a213"},
+    {file = "google-cloud-dataproc-5.7.0.tar.gz", hash = "sha256:086229a0f987448bb92029e166212acef85d4e8e5d94f2822add21b382bf1c04"},
+    {file = "google_cloud_dataproc-5.7.0-py2.py3-none-any.whl", hash = "sha256:599436545f7091eda2fafe194ca657d302512064009d685c2ff3f0cd7c08deaf"},
 ]
 
 [package.dependencies]
 google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
 grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -633,8 +632,8 @@ files = [
 google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
 google-cloud-core = ">=1.4.1,<3.0.0dev"
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -656,8 +655,8 @@ google-cloud-audit-log = ">=0.1.0,<1.0.0dev"
 google-cloud-core = ">=2.0.0,<3.0.0dev"
 grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -676,8 +675,8 @@ files = [
 google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
 grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
 proto-plus = [
-    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
     {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
+    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
 ]
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
 
@@ -1555,6 +1554,20 @@ files = [
 [package.dependencies]
 six = ">=1.5"
 
+[[package]]
+name = "python-multipart"
+version = "0.0.6"
+description = "A streaming multipart parser for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"},
+    {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"},
+]
+
+[package.extras]
+dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"]
+
 [[package]]
 name = "python-slugify"
 version = "8.0.1"
@@ -1985,4 +1998,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = ">= 3.10, < 3.12"
-content-hash = "f9fbed1827b10cd54cc9f09d069a5e5ace571ea397200ac9e72ea22d9283a478"
+content-hash = "a38ab14d4b716b1ff01d6f3f5363eb3b1f93e9acc4a7b99d3f8e56c7b896e60d"
diff --git a/pyproject.toml b/pyproject.toml
index b6fc4b89..d47cd926 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,6 +21,7 @@ dbt-bigquery = "^1.6"
 click-aliases = "^1"
 termcolor = "^2"
 google-cloud-build = "^3"
+python-multipart = "^0"
 
 # dbt-server dependencies
 google-cloud-logging = "^3"
@@ -35,9 +36,7 @@ requests-mock = "^1"
 pyfakefs = "^5"
 
 [tool.poetry.scripts]
-dbt-remote = 'dbt_remote.src.dbt_remote.cli:cli'
+dbt-remote = "dbt_remote.cli:cli"
 
 [tool.pytest.ini_options]
-pythonpath = [
-  ".", "dbt_remote", "dbt_remote/src", "dbt_remote/src/dbt_remote", "dbt_server"
-]
+pythonpath = [".", "dbt_remote", "dbt_server"]
diff --git a/tests/test_end_to_end.py b/tests/test_end_to_end.py
index fac387f2..173c9a37 100644
--- a/tests/test_end_to_end.py
+++ b/tests/test_end_to_end.py
@@ -1,14 +1,17 @@
 import os
+from pathlib import Path
 from typing import List
 from uuid import uuid4
 from datetime import datetime
 
 from click.testing import CliRunner
-from dbt_remote.src.dbt_remote.cli import cli
+from dbt_remote.cli import cli
 import pytest
 from google.cloud.devtools.cloudbuild_v1 import CloudBuildClient, ListBuildsRequest
 
 os.environ["LOCAL"] = "true"
+os.environ["PROJECT_DIR"] = str(Path(__file__).parent / "dbt_project")
+
 os.environ["SERVICE_ACCOUNT"] = f"dbt-server-service-account@{os.environ['PROJECT_ID']}.iam.gserviceaccount.com"
 os.environ["BUCKET_NAME"] = f"{os.environ['PROJECT_ID']}-dbt-server"
 os.environ["DOCKER_IMAGE"] = f"{os.environ['LOCATION']}-docker.pkg.dev/{os.environ['PROJECT_ID']}/dbt-server-repository/server-image"
@@ -17,22 +20,22 @@
 os.environ["UUID"] = str(uuid4())
 
 
-def test_image_submit():
-    start_time = datetime.utcnow()
-    result = run_command("dbt-remote image submit")
-    assert result.exit_code == 0
+# def test_image_submit():
+#     start_time = datetime.utcnow()
+#     result = run_command("dbt-remote image submit")
+#     assert result.exit_code == 0
 
-    client = CloudBuildClient()
-    request = ListBuildsRequest(
-        parent=f"projects/{os.environ['PROJECT_ID']}/locations/{os.environ['LOCATION']}",
-        project_id=os.environ["PROJECT_ID"],
-        filter=f"images={os.environ['DOCKER_IMAGE']}"
-    )
-    response = client.list_builds(request=request)
-    latest_build = next(iter(response), None)
+#     client = CloudBuildClient()
+#     request = ListBuildsRequest(
+#         parent=f"projects/{os.environ['PROJECT_ID']}/locations/{os.environ['LOCATION']}",
+#         project_id=os.environ["PROJECT_ID"],
+#         filter=f"images={os.environ['DOCKER_IMAGE']}"
+#     )
+#     response = client.list_builds(request=request)
+#     latest_build = next(iter(response), None)
 
-    assert latest_build.status.name == "SUCCESS"
-    assert str(latest_build.create_time) > str(start_time)
+#     assert latest_build.status.name == "SUCCESS"
+#     assert str(latest_build.create_time) > str(start_time)
 
 @pytest.mark.parametrize("command, expected_in_output", [
     (