Remove extensions mechanism (#5895)

* Remove extensions mechanism.

* Missing change.
This commit is contained in:
Arik Fraimovich
2023-03-20 07:39:21 -07:00
committed by GitHub
parent 0dfe726ec8
commit 28b0a2379d
26 changed files with 42 additions and 503 deletions

View File

@@ -9,10 +9,8 @@ build-docker-image-job: &build-docker-image-job
- checkout
- run: sudo apt update
- run: sudo apt install python3-pip
- run: sudo pip3 install -r requirements_bundles.txt
- run: .circleci/update_version
- run: sudo npm install --global --force yarn@1.22.10
- run: yarn bundle
- run: .circleci/docker_build
jobs:
backend-lint:
@@ -83,10 +81,8 @@ jobs:
- checkout
- run: sudo apt update
- run: sudo apt install python3-pip
- run: sudo pip3 install -r requirements_bundles.txt
- run: sudo npm install --global --force yarn@1.22.10
- run: yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- run: yarn bundle
- run:
name: Run App Tests
command: yarn test

View File

@@ -39,46 +39,46 @@ RUN useradd --create-home redash
# Ubuntu packages
RUN apt-get update && \
apt-get install -y --no-install-recommends \
curl \
gnupg \
build-essential \
pwgen \
libffi-dev \
sudo \
git-core \
# Postgres client
libpq-dev \
# ODBC support:
g++ unixodbc-dev \
# for SAML
xmlsec1 \
# Additional packages required for data sources:
libssl-dev \
default-libmysqlclient-dev \
freetds-dev \
libsasl2-dev \
unzip \
libsasl2-modules-gssapi-mit && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
curl \
gnupg \
build-essential \
pwgen \
libffi-dev \
sudo \
git-core \
# Postgres client
libpq-dev \
# ODBC support:
g++ unixodbc-dev \
# for SAML
xmlsec1 \
# Additional packages required for data sources:
libssl-dev \
default-libmysqlclient-dev \
freetds-dev \
libsasl2-dev \
unzip \
libsasl2-modules-gssapi-mit && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
ARG TARGETPLATFORM
ARG databricks_odbc_driver_url=https://databricks.com/wp-content/uploads/2.6.10.1010-2/SimbaSparkODBC-2.6.10.1010-2-Debian-64bit.zip
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
&& chmod 600 /tmp/simba_odbc.zip \
&& unzip /tmp/simba_odbc.zip -d /tmp/ \
&& dpkg -i /tmp/SimbaSparkODBC-*/*.deb \
&& printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
&& rm /tmp/simba_odbc.zip \
&& rm -rf /tmp/SimbaSparkODBC*; fi
curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \
&& chmod 600 /tmp/simba_odbc.zip \
&& unzip /tmp/simba_odbc.zip -d /tmp/ \
&& dpkg -i /tmp/SimbaSparkODBC-*/*.deb \
&& printf "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
&& rm /tmp/simba_odbc.zip \
&& rm -rf /tmp/SimbaSparkODBC*; fi
WORKDIR /app
@@ -93,7 +93,7 @@ RUN pip install pip==20.2.4;
COPY requirements_all_ds.txt ./
RUN if [ "x$skip_ds_deps" = "x" ] ; then pip install -r requirements_all_ds.txt ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi
COPY requirements_bundles.txt requirements_dev.txt ./
COPY requirements_dev.txt ./
RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements_dev.txt ; fi
COPY requirements.txt ./

View File

@@ -1,4 +1,4 @@
.PHONY: compose_build up test_db create_database clean down bundle tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
compose_build:
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose build
@@ -22,9 +22,6 @@ clean:
down:
docker-compose down
bundle:
docker-compose run server bin/bundle-extensions
tests:
docker-compose run server tests
@@ -34,20 +31,19 @@ lint:
backend-unit-tests: up test_db
docker-compose run --rm --name tests server tests
frontend-unit-tests: bundle
frontend-unit-tests:
CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 yarn --frozen-lockfile
yarn bundle
yarn test
test: lint backend-unit-tests frontend-unit-tests
build: bundle
build:
yarn build
watch: bundle
watch:
yarn watch
start: bundle
start:
yarn start
redis-cli:

View File

@@ -1,126 +0,0 @@
#!/usr/bin/env python3
"""Copy bundle extension files to the client/app/extension directory"""
import logging
import os
from pathlib import Path
from shutil import copy
from collections import OrderedDict as odict
import importlib_metadata
import importlib_resources
# Name of the subdirectory
BUNDLE_DIRECTORY = "bundle"
logger = logging.getLogger(__name__)
# Make a directory for extensions and set it as an environment variable
# to be picked up by webpack.
extensions_relative_path = Path("client", "app", "extensions")
extensions_directory = Path(__file__).parent.parent / extensions_relative_path
if not extensions_directory.exists():
extensions_directory.mkdir()
os.environ["EXTENSIONS_DIRECTORY"] = str(extensions_relative_path)
def entry_point_module(entry_point):
"""Returns the dotted module path for the given entry point"""
return entry_point.pattern.match(entry_point.value).group("module")
def load_bundles():
""""Load bundles as defined in Redash extensions.
The bundle entry point can be defined as a dotted path to a module
or a callable, but it won't be called but just used as a means
to find the files under its file system path.
The name of the directory it looks for files in is "bundle".
So a Python package with an extension bundle could look like this::
my_extensions/
├── __init__.py
└── wide_footer
├── __init__.py
└── bundle
├── extension.js
└── styles.css
and would then need to register the bundle with an entry point
under the "redash.bundles" group, e.g. in your setup.py::
setup(
# ...
entry_points={
"redash.bundles": [
"wide_footer = my_extensions.wide_footer",
]
# ...
},
# ...
)
"""
bundles = odict()
# HACK:
# bin/bundle-extensions is tested in different versions.
# circleci frontend-unit-tests: python 3.5 and importlib-metadata-2.1.3
# circleci backend-unit-tests: python 3.7 and importlib-metadata-5.0.0
if importlib_metadata.version("importlib_metadata") >= "5.0.0":
bundles_entry_points = importlib_metadata.entry_points(group="redash.bundles")
else:
bundles_entry_points = importlib_metadata.entry_points().get(
"redash.bundles", []
)
for entry_point in bundles_entry_points:
logger.info('Loading Redash bundle "%s".', entry_point.name)
module = entry_point_module(entry_point)
# Try to get a list of bundle files
try:
bundle_dir = importlib_resources.files(module).joinpath(BUNDLE_DIRECTORY)
except (ImportError, TypeError):
# Module isn't a package, so can't have a subdirectory/-package
logger.error(
'Redash bundle module "%s" could not be imported: "%s"',
entry_point.name,
module,
)
continue
if not bundle_dir.is_dir():
logger.error(
'Redash bundle directory "%s" could not be found or is not a directory: "%s"',
entry_point.name,
bundle_dir,
)
continue
bundles[entry_point.name] = list(bundle_dir.rglob("*"))
return bundles
bundles = load_bundles().items()
if bundles:
print("Number of extension bundles found: {}".format(len(bundles)))
else:
print("No extension bundles found.")
for bundle_name, paths in bundles:
# Shortcut in case no paths were found for the bundle
if not paths:
print('No paths found for bundle "{}".'.format(bundle_name))
continue
# The destination for the bundle files with the entry point name as the subdirectory
destination = Path(extensions_directory, bundle_name)
if not destination.exists():
destination.mkdir()
# Copy the bundle directory from the module to its destination.
print('Copying "{}" bundle to {}:'.format(bundle_name, destination.resolve()))
for src_path in paths:
dest_path = destination / src_path.name
print(" - {} -> {}".format(src_path, dest_path))
copy(str(src_path), str(dest_path))

View File

@@ -5,7 +5,6 @@
"main": "index.js",
"scripts": {
"start": "npm-run-all --parallel watch:viz webpack-dev-server",
"bundle": "bin/bundle-extensions",
"clean": "rm -rf ./client/dist/",
"build:viz": "(cd viz-lib && yarn build:babel)",
"build": "yarn clean && yarn build:viz && NODE_ENV=production webpack",

View File

@@ -25,7 +25,6 @@ class Redash(Flask):
def create_app():
from . import (
authentication,
extensions,
handlers,
limiter,
mail,
@@ -54,7 +53,6 @@ def create_app():
limiter.init_app(app)
handlers.init_app(app)
configure_webpack(app)
extensions.init_app(app)
users.init_app(app)
tasks.init_app(app)

View File

@@ -1,107 +0,0 @@
import logging
from collections import OrderedDict as odict
from importlib_metadata import entry_points
# The global Redash extension registry
extensions = odict()
# The periodic RQ jobs as provided by Redash extensions.
# This is separate from the internal periodic RQ jobs
# since the extension job discovery phase is
# after the configuration has already happened.
periodic_jobs = odict()
extension_logger = logging.getLogger(__name__)
def entry_point_loader(group_name, mapping, logger=None, *args, **kwargs):
"""
Loads the list Python entry points with the given entry point group name
(e.g. "redash.extensions"), calls each with the provided *args/**kwargs
arguments and stores the results in the provided mapping under the name
of the entry point.
If provided, the logger is used for error and debugging statements.
"""
if logger is None:
logger = extension_logger
for entry_point in entry_points(group=group_name):
logger.info('Loading entry point "%s".', entry_point.name)
try:
# Then try to load the entry point (import and getattr)
obj = entry_point.load()
except (ImportError, AttributeError):
# or move on
logger.error(
'Entry point "%s" could not be found.', entry_point.name, exc_info=True
)
continue
if not callable(obj):
logger.error('Entry point "%s" is not a callable.', entry_point.name)
continue
try:
# then simply call the loaded entry point.
mapping[entry_point.name] = obj(*args, **kwargs)
except AssertionError:
logger.error(
'Entry point "%s" could not be loaded.', entry_point.name, exc_info=True
)
continue
def load_extensions(app):
"""Load the Redash extensions for the given Redash Flask app.
The extension entry point can return any type of value but
must take a Flask application object.
E.g.::
def extension(app):
app.logger.info("Loading the Foobar extenions")
Foobar(app)
"""
entry_point_loader("redash.extensions", extensions, logger=app.logger, app=app)
def load_periodic_jobs(logger=None):
"""Load the periodic jobs as defined in Redash extensions.
The periodic task entry point needs to return a set of parameters
that can be passed to RQ Scheduler API:
https://github.com/rq/rq-scheduler#periodic--repeated-jobs
E.g.::
def add_two_and_two():
return {
"func": add,
"args": [2, 2]
"interval": 10, # in seconds or as a timedelta
}
and then registered with an entry point under the "redash.periodic_jobs"
group, e.g. in your setup.py::
setup(
# ...
entry_points={
"redash.periodic_jobs": [
"add_two_and_two = calculus.addition:add_two_and_two",
]
# ...
},
# ...
)
"""
entry_point_loader("redash.periodic_jobs", periodic_jobs, logger=logger)
def init_app(app):
load_extensions(app)

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from rq.job import Job
from rq_scheduler import Scheduler
from redash import extensions, settings, rq_redis_connection, statsd_client
from redash import settings, rq_redis_connection, statsd_client
from redash.tasks import (
sync_user_details,
refresh_queries,
@@ -92,10 +92,6 @@ def periodic_job_definitions():
# Add your own custom periodic jobs in your dynamic_settings module.
jobs.extend(settings.dynamic_settings.periodic_jobs() or [])
# Add periodic jobs that are shipped as part of Redash extensions
extensions.load_periodic_jobs(logger)
jobs.extend(list(extensions.periodic_jobs.values()))
return jobs

View File

@@ -9,7 +9,6 @@ from rq.decorators import job as rq_job
from redash import (
create_app,
extensions,
settings,
redis_connection,
rq_redis_connection,

View File

@@ -59,9 +59,6 @@ sshtunnel==0.1.5
supervisor==4.1.0
supervisor_checks==0.8.1
werkzeug==0.16.1
# Install the dependencies of the bin/bundle-extensions script here.
# It has its own requirements file to simplify the frontend client build process
-r requirements_bundles.txt
# Uncomment the requirement for ldap3 if using ldap.
# It is not included by default because of the GPL license conflict.
# ldap3==2.2.4

View File

@@ -1,8 +0,0 @@
# These are the requirements that the extension bundle
# loading mechanism needs on Python 2 and can be removed
# when moved to Python 3.
# It's automatically installed when running npm run bundle
# These can be removed when upgrading to Python 3.x
importlib-metadata>=1.6 # remove when on 3.8
importlib_resources==1.5 # remove when on 3.9

View File

@@ -1,2 +0,0 @@
dist
build

View File

@@ -1,2 +0,0 @@
include README.md
recursive-include redash_dummy *.jsx

View File

@@ -1,22 +0,0 @@
# How to update the dummy extension?
If you'd like to extend the dummy extension, please update the ``setup.py``
file and the ``redash_dummy.py`` module.
Please make sure to regenerate the *.egg-info directory. See below.
# How to generate the redash_dummy.egg-info directory?
The `egg-info` directory is what is usually created in the
site-packages directory when running `pip install <packagename>` and
contains the metadata derived from the `setup.py` file.
In other words, it's auto-generated and you'll need to follow the following
steps to update it (e.g. when extending the extension tests). From the
host computer (assuming the Docker development environment) run:
- `make bash` -- to create container running with Bash and entering it
- `cd tests/extensions/redash-dummy/` -- change the directory to the directory with the dummy extension
- `python setup.py egg_info` -- to create/update the egg-info directory
The egg-info directory is *not* cleaned up by pip, just the link in the `~/.local` site-packages directory.

View File

@@ -1,10 +0,0 @@
Metadata-Version: 1.0
Name: redash-dummy
Version: 0.2
Summary: Redash extensions for testing
Home-page: UNKNOWN
Author: Redash authors
Author-email: UNKNOWN
License: MIT
Description: UNKNOWN
Platform: UNKNOWN

View File

@@ -1,12 +0,0 @@
MANIFEST.in
README.md
setup.py
redash_dummy/__init__.py
redash_dummy/extension.py
redash_dummy/jobs.py
redash_dummy.egg-info/PKG-INFO
redash_dummy.egg-info/SOURCES.txt
redash_dummy.egg-info/dependency_links.txt
redash_dummy.egg-info/entry_points.txt
redash_dummy.egg-info/top_level.txt
redash_dummy/bundle/WideFooter.jsx

View File

@@ -1,13 +0,0 @@
[redash.bundles]
wide_footer = redash_dummy
[redash.extensions]
assertive_extension = redash_dummy.extension:assertive_extension
non_callable_extension = redash_dummy.extension:module_attribute
not_findable_extension = redash_dummy.extension:missing_attribute
not_importable_extension = missing_extension_module:extension
working_extension = redash_dummy.extension:extension
[redash.periodic_jobs]
dummy_periodic_job = redash_dummy.jobs:periodic_job

View File

@@ -1,9 +0,0 @@
import React from "react";
export default function WideFooter() {
return (
<div>
This is a wide footer
</div>
);
}

View File

@@ -1,11 +0,0 @@
module_attribute = "hello!"
def extension(app):
"""This extension will work"""
return "extension loaded"
def assertive_extension(app):
"""This extension won't work"""
assert False

View File

@@ -1,14 +0,0 @@
from datetime import timedelta
def job_callback():
return "result"
def periodic_job(*args, **kwargs):
"""This periodic job will successfully load"""
return {
"func": job_callback,
"timeout": 60,
"interval": timedelta(minutes=1),
}

View File

@@ -1,25 +0,0 @@
from setuptools import setup, find_packages
setup(
name="redash-dummy",
version="0.2",
description="Redash extensions for testing",
author="Redash authors",
license="MIT",
packages=find_packages(),
include_package_data=True,
entry_points={
"redash.extensions": [
"working_extension = redash_dummy.extension:extension",
"non_callable_extension = redash_dummy.extension:module_attribute",
"not_findable_extension = redash_dummy.extension:missing_attribute",
"not_importable_extension = missing_extension_module:extension",
"assertive_extension = redash_dummy.extension:assertive_extension",
],
"redash.periodic_jobs": ["dummy_periodic_job = redash_dummy.jobs:periodic_job"],
"redash.bundles": [
"wide_footer = redash_dummy",
],
},
)

View File

@@ -1,79 +0,0 @@
import logging
import shutil
import subprocess
import sys
from pathlib import Path
from redash import extensions
from redash.tasks import periodic_job_definitions
from tests import BaseTestCase
logger = logging.getLogger(__name__)
dummy_extension = "redash-dummy"
this_dir = Path(__file__).parent.resolve()
app_dir = this_dir.parent.parent
dummy_path = str(this_dir / dummy_extension)
test_bundle = (
app_dir / "client" / "app" / "extensions" / "wide_footer" / "WideFooter.jsx"
)
class TestExtensions(BaseTestCase):
@classmethod
def setUpClass(cls):
sys.path.insert(0, dummy_path)
@classmethod
def tearDownClass(cls):
sys.path.remove(dummy_path)
def test_working_extension(self):
self.assertIn("working_extension", extensions.extensions.keys())
self.assertEqual(
extensions.extensions.get("working_extension"), "extension loaded"
)
def test_assertive_extension(self):
self.assertNotIn("assertive_extension", extensions.extensions.keys())
def test_not_findable_extension(self):
self.assertNotIn("not_findable_extension", extensions.extensions.keys())
def test_not_importable_extension(self):
self.assertNotIn("not_importable_extension", extensions.extensions.keys())
def test_non_callable_extension(self):
self.assertNotIn("non_callable_extension", extensions.extensions.keys())
def test_dummy_periodic_task(self):
# need to load the periodic tasks manually since this isn't
# done automatically on test suite start but only part of
# the worker configuration
extensions.load_periodic_jobs(logger)
self.assertIn("dummy_periodic_job", extensions.periodic_jobs.keys())
def test_dummy_periodic_task_definitions(self):
jobs = periodic_job_definitions()
from redash_dummy.jobs import job_callback
self.assertIn(job_callback, [job.get("func", None) for job in jobs])
class TestBundles(BaseTestCase):
@classmethod
def setUpClass(cls):
# Install the redash-dummy package temporarily using pip
# in the user's local site package directory under ~/.local/
subprocess.call(["pip", "install", "--user", dummy_path])
@classmethod
def tearDownClass(cls):
subprocess.call(["pip", "uninstall", "-y", "redash-dummy"])
def test_bundle_extensions(self):
# cleaning up after running bundle-extensions again
self.addCleanup(lambda: shutil.rmtree(test_bundle.parent))
assert not test_bundle.exists()
subprocess.run(str(app_dir / "bin" / "bundle-extensions"), check=True)
assert test_bundle.exists()