fix(version upgrade): Make fixes to upgrade to v4.1.1 (#44)
* Fixes to upgrade to v4.1.1
kgopal492 authored Feb 4, 2025
1 parent e96f17d commit bfc4be1
Showing 12 changed files with 19 additions and 55 deletions.
36 changes: 0 additions & 36 deletions .github/workflows/dependency-review.yml

This file was deleted.

3 changes: 2 additions & 1 deletion Dockerfile
@@ -161,10 +161,11 @@ RUN apt-get update -qq \
# Cache everything for dev purposes...

COPY --chown=superset:superset requirements/development.txt requirements/
COPY --chown=superset:superset requirements/local.txt requirements/
RUN --mount=type=cache,target=/root/.cache/pip \
apt-get update -qq && apt-get install -yqq --no-install-recommends \
build-essential \
&& pip install -r requirements/development.txt \
&& pip install -r requirements/local.txt \
&& apt-get autoremove -yqq --purge build-essential \
&& rm -rf /var/lib/apt/lists/*

2 changes: 1 addition & 1 deletion docker-compose-image-tag.yml
@@ -21,7 +21,7 @@
# create you own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
x-superset-image: &superset-image superset:latest
x-superset-depends-on: &superset-depends-on
- db
- redis
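The image-tag compose file now points at a plain superset:latest image (for example, one built locally or pushed to an internal registry) instead of pulling the upstream apachesuperset.docker.scarf.sh/apache/superset tag.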
3 changes: 3 additions & 0 deletions requirements/local.txt
@@ -0,0 +1,3 @@
-r development.txt
-e file:.
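The new requirements/local.txt chains the existing development requirements (-r development.txt) and installs the checked-out Superset package itself in editable mode (-e file:.); the COPY and pip install lines added to the Dockerfile above consume this file.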

@@ -24,6 +24,7 @@
"lib"
],
"dependencies": {
"@types/escape-html": "^1.0.4",
"d3-array": "^1.2.0",
"echarts": "^5.4.1",
"lodash": "^4.17.21",
@@ -1,4 +1,3 @@
import React from 'react';
import { t, validateNonEmpty } from '@superset-ui/core';
import {
formatSelectOptions,
@@ -81,6 +81,8 @@ const formData: EchartsMixedTimeseriesFormData = {
forecastPeriods: [],
forecastInterval: 0,
forecastSeasonalityDaily: 0,
pinterestDeltaTable: false,
pinterestDeltaTableColumns: [],
};

const queriesData = [
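The pinterestDeltaTable and pinterestDeltaTableColumns entries extend the mixed-timeseries test fixture with the fork's extra form-data fields so the chart tests stay in sync after the upgrade.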
1 change: 1 addition & 0 deletions superset/config.py
@@ -194,6 +194,7 @@ def _try_json_readsha(filepath: str, length: int) -> str | None:
SQLALCHEMY_DATABASE_URI = (
f"""sqlite:///{os.path.join(DATA_DIR, "superset.db")}?check_same_thread=false"""
)
ALEMBIC_SQLALCHEMY_DATABASE_URI = None

# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
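ALEMBIC_SQLALCHEMY_DATABASE_URI defaults to None, so existing deployments keep using SQLALCHEMY_DATABASE_URI for migrations; the setting is consumed by the Alembic environment in the next file.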
5 changes: 4 additions & 1 deletion superset/migrations/env.py
@@ -36,7 +36,10 @@
fileConfig(config.config_file_name)
logger = logging.getLogger("alembic.env")

DATABASE_URI = current_app.config["SQLALCHEMY_DATABASE_URI"]
DATABASE_URI = (
current_app.config["ALEMBIC_SQLALCHEMY_DATABASE_URI"]
or current_app.config["SQLALCHEMY_DATABASE_URI"]
)
if "sqlite" in DATABASE_URI:
logger.warning(
"SQLite Database support for metadata databases will \
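Together, the two changes above let a deployment run Alembic migrations over a different connection than the one the web app uses. A minimal superset_config.py sketch, with placeholder URIs (not part of this commit):

# superset_config.py -- illustrative only; both URIs are placeholders.
# If ALEMBIC_SQLALCHEMY_DATABASE_URI stays None (the new default in
# superset/config.py), migrations fall back to SQLALCHEMY_DATABASE_URI,
# exactly as the updated superset/migrations/env.py does.
SQLALCHEMY_DATABASE_URI = "postgresql://superset:superset@metadata-db:5432/superset"

# For example, a connection with elevated privileges used only for schema migrations.
ALEMBIC_SQLALCHEMY_DATABASE_URI = (
    "postgresql://superset_admin:change-me@metadata-db:5432/superset"
)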
4 changes: 0 additions & 4 deletions superset/tasks/cache.py
@@ -29,7 +29,6 @@
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.tags.models import Tag, TaggedObject
from superset.tasks.utils import fetch_csrf_token
from superset.utils import json
from superset.utils.date_parser import parse_human_datetime
from superset.utils.urls import get_url_path
@@ -271,9 +270,6 @@ def fetch_url(data: str, headers: dict[str, str]) -> dict[str, str]:
"""
result = {}
try:
# Fetch CSRF token for API request
headers.update(fetch_csrf_token(headers))

url = get_url_path("ChartRestApi.warm_up_cache")
logger.info("Fetching %s with payload %s", url, data)
req = request.Request(
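With the CSRF handshake removed, fetch_url builds the warm-up request directly from the headers the caller passes in. A condensed sketch of that flow (a hypothetical helper, not the verbatim Superset function; error handling is simplified):

from urllib import request
from urllib.error import URLError

def warm_up_chart_cache(url: str, payload: str, headers: dict[str, str]) -> dict[str, str]:
    # Headers are forwarded as-is; there is no fetch_csrf_token() round trip first.
    result: dict[str, str] = {}
    try:
        req = request.Request(
            url, data=bytes(payload, "utf-8"), headers=headers, method="PUT"
        )
        response = request.urlopen(req)
        if response.code == 200:
            result = {"success": payload}
    except URLError as ex:
        result = {"error": str(ex)}
    return result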
7 changes: 3 additions & 4 deletions superset/tasks/database.py
@@ -35,16 +35,15 @@ def db_tables_cache_warm_up(database_id: str, schema_name: str) -> None:
logger.error("Database not found, database_id: %i", database_id)

database.get_all_table_names_in_schema(
catalog=None,
schema=schema_name,
force=True,
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
)
database.get_all_view_names_in_schema(
schema=schema_name,
force=True,
cache=database.table_cache_enabled,
cache_timeout=database.table_cache_timeout,
None,
schema_name,
)
logger.info(
"Database tables cache warm up succeeded for database_id: %i, schema_name: %s", # pylint: disable=line-too-long
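The listing calls are updated to pass the catalog (None) along with the schema name for the v4.1.1 Database API. For reference, a hypothetical one-off invocation of the task above, with placeholder arguments:

# Placeholder values; db_tables_cache_warm_up is the function shown in the hunk above.
from superset.tasks.database import db_tables_cache_warm_up

db_tables_cache_warm_up(database_id="1", schema_name="public")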
9 changes: 2 additions & 7 deletions tests/integration_tests/tasks/test_cache.py
@@ -29,10 +29,9 @@
],
ids=["Without trailing slash", "With trailing slash"],
)
@mock.patch("superset.tasks.cache.fetch_csrf_token")
@mock.patch("superset.tasks.cache.request.Request")
@mock.patch("superset.tasks.cache.request.urlopen")
def test_fetch_url(mock_urlopen, mock_request_cls, mock_fetch_csrf_token, base_url):
def test_fetch_url(mock_urlopen, mock_request_cls, base_url):
from superset.tasks.cache import fetch_url

mock_request = mock.MagicMock()
@@ -42,21 +41,17 @@ def test_fetch_url(mock_urlopen, mock_request_cls, mock_fetch_csrf_token, base_u
mock_urlopen.return_value.code = 200

initial_headers = {"Cookie": "cookie", "key": "value"}
csrf_headers = initial_headers | {"X-CSRF-Token": "csrf_token"}
mock_fetch_csrf_token.return_value = csrf_headers

app.config["WEBDRIVER_BASEURL"] = base_url
data = "data"
data_encoded = b"data"

result = fetch_url(data, initial_headers)

assert data == result["success"]
mock_fetch_csrf_token.assert_called_once_with(initial_headers)
mock_request_cls.assert_called_once_with(
"http://base-url/api/v1/chart/warm_up_cache",
data=data_encoded,
headers=csrf_headers,
headers=initial_headers,
method="PUT",
)
# assert the same Request object is used
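The test no longer patches fetch_csrf_token and now asserts that the warm-up request is constructed with the caller's original headers, matching the simplified fetch_url above.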
