Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 10:00:45 -05:00)

Compare commits: 79 commits, 24.03.0-dev ... 24.07.0-dev
| Author | SHA1 | Date |
|---|---|---|
|  | db4fdd003e |  |
|  | 4cb32fc1c3 |  |
|  | a6c728b99c |  |
|  | 01e036d0a9 |  |
|  | 17fe69f551 |  |
|  | bceaab0496 |  |
|  | 70dd05916f |  |
|  | 60a12e906e |  |
|  | ec051a8939 |  |
|  | 60d3c66a8b |  |
|  | bd4ba96c43 |  |
|  | 10a46fd33c |  |
|  | c874eb6b11 |  |
|  | f3a323695f |  |
|  | 408ba78bd0 |  |
|  | 58cc49bc88 |  |
|  | 753ea846ff |  |
|  | 1b946b59ec |  |
|  | 4569191113 |  |
|  | 62890c3ec4 |  |
|  | bd115e7f5f |  |
|  | bd17662005 |  |
|  | b7f22b1896 |  |
|  | 897c683980 |  |
|  | 2b974e12ed |  |
|  | 372adfed6b |  |
|  | dbab9cadb4 |  |
|  | 06244716e6 |  |
|  | f09760389a |  |
|  | 84e6d3cad5 |  |
|  | 3399e3761e |  |
|  | 1c48b2218b |  |
|  | 5ac5d86f5e |  |
|  | 5e4764af9c |  |
|  | e2a39de7d1 |  |
|  | 6c68b48917 |  |
|  | 7e8a61c73d |  |
|  | 991e94dd6a |  |
|  | 2ffeecb813 |  |
|  | 3dd855aef1 |  |
|  | 713aca440a |  |
|  | 70bb684d9e |  |
|  | 4034f791c3 |  |
|  | b9875a231b |  |
|  | 062a70cf20 |  |
|  | c12d45077a |  |
|  | 6d6412753d |  |
|  | 275e12e7c1 |  |
|  | 77d7508cee |  |
|  | 9601660751 |  |
|  | 45c6fa0591 |  |
|  | 95ecb8e229 |  |
|  | cb0707176c |  |
|  | d7247f8b84 |  |
|  | 776703fab7 |  |
|  | 34cde71238 |  |
|  | f631075be3 |  |
|  | 3f19534301 |  |
|  | 24dec192ee |  |
|  | 82d88ed4eb |  |
|  | af0773c58a |  |
|  | 15e6583d72 |  |
|  | 4eb5f4e47f |  |
|  | a0f5c706ff |  |
|  | 702a550659 |  |
|  | 38a06c7ab9 |  |
|  | a6074878bb |  |
|  | fb348c7116 |  |
|  | 24419863ec |  |
|  | c4d3d9c683 |  |
|  | 1672cd9280 |  |
|  | 6575a6499a |  |
|  | e360e4658e |  |
|  | 107933c363 |  |
|  | 667a696ca5 |  |
|  | 7d0d242072 |  |
|  | d554136f70 |  |
|  | 34723e2f3e |  |
|  | 11794b3fe3 |  |
@@ -5,7 +5,7 @@ WORKDIR $APP
COPY package.json yarn.lock .yarnrc $APP/
COPY viz-lib $APP/viz-lib
RUN npm install yarn@1.22.19 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null
RUN npm install yarn@1.22.22 -g && yarn --frozen-lockfile --network-concurrency 1 > /dev/null

COPY . $APP

@@ -18,7 +18,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
image: pgautoupgrade/pgautoupgrade:latest
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:

@@ -66,7 +66,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
image: pgautoupgrade/pgautoupgrade:latest
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped
environment:

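Both compose hunks above move the postgres service onto the pgautoupgrade/pgautoupgrade:latest image while keeping the relaxed-durability flags. A minimal sketch for double-checking those settings on a running dev/test stack; the service name postgres comes from the hunks above, while the postgres superuser and an already-running compose project are assumptions:

```sh
# Sketch: confirm the durability trade-off flags are active in the test database.
docker compose exec postgres psql -U postgres -c "SHOW fsync;"
docker compose exec postgres psql -U postgres -c "SHOW full_page_writes;"
docker compose exec postgres psql -U postgres -c "SHOW synchronous_commit;"
# Each query should report "off" while the command line above is in effect.
```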
@@ -1,5 +1,4 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/

.github/workflows/ci.yml (15 changed lines, vendored)
@@ -8,6 +8,7 @@ on:
- master
env:
NODE_VERSION: 18
YARN_VERSION: 1.22.22
jobs:
backend-lint:
runs-on: ubuntu-22.04
@@ -19,7 +20,7 @@ jobs:
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.8'
- run: sudo pip install black==23.1.0 ruff==0.0.287
@@ -82,13 +83,13 @@ jobs:
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run Lint
run: yarn lint:ci
@@ -109,13 +110,13 @@ jobs:
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Run App Tests
run: yarn test
@@ -142,7 +143,7 @@ jobs:
with:
fetch-depth: 1
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'yarn'
@@ -152,7 +153,7 @@ jobs:
echo "CODE_COVERAGE=true" >> "$GITHUB_ENV"
- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@$YARN_VERSION
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
- name: Setup Redash Server
run: |


.github/workflows/periodic-snapshot.yml (22 changed lines, vendored)
@@ -3,7 +3,7 @@ name: Periodic Snapshot
# 10 minutes after midnight on the first of every month
on:
schedule:
- cron: "10 0 1 * *"
- cron: '10 0 1 * *'

permissions:
contents: write
@@ -16,13 +16,15 @@ jobs:
with:
ssh-key: ${{secrets.ACTION_PUSH_KEY}}
- run: |
date="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=$date '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=$date '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=$date '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
git config user.name github-actions
git config user.email github-actions@github.com
# https://api.github.com/users/github-actions[bot]
git config user.name 'github-actions[bot]'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'

TAG_NAME="$(date +%y.%m).0-dev"
gawk -i inplace -F: -v q=\" -v tag=${TAG_NAME} '/^ "version": / { print $1 FS, q tag q ","; next} { print }' package.json
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^__version__ =/ { print $1 FS, q tag q; next} { print }' redash/__init__.py
gawk -i inplace -F= -v q=\" -v tag=${TAG_NAME} '/^version =/ { print $1 FS, q tag q; next} { print }' pyproject.toml
git add package.json redash/__init__.py pyproject.toml
git commit -m "Snapshot: ${date}"
git tag $date
git push --atomic origin master refs/tags/$date
git commit -m "Snapshot: ${TAG_NAME}"
git tag ${TAG_NAME}
git push --atomic origin master refs/tags/${TAG_NAME}

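The reworked snapshot step renames the `date` variable to `TAG_NAME` and keeps the same gawk in-place substitutions. A small sketch of that substitution against a scratch package.json, assuming GNU awk with the inplace extension is available (as it is on the workflow's Ubuntu runner):

```sh
# Sketch: reproduce the workflow's version bump on a throwaway file.
TAG_NAME="$(date +%y.%m).0-dev"   # e.g. 24.07.0-dev

cat > /tmp/package.json <<'EOF'
{
  "name": "redash-client",
  "version": "24.03.0-dev",
  "main": "index.js"
}
EOF

# Split on ":", keep the key, replace the quoted value, pass every other line through.
gawk -i inplace -F: -v q=\" -v tag="$TAG_NAME" \
  '/^  "version": / { print $1 FS, q tag q ","; next } { print }' /tmp/package.json

grep '"version"' /tmp/package.json   # now carries the freshly computed tag
```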

.github/workflows/preview-image.yml (2 changed lines, vendored)
@@ -46,7 +46,7 @@ jobs:

- name: Install Dependencies
run: |
npm install --global --force yarn@1.22.19
npm install --global --force yarn@1.22.22
yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

- name: Set up Docker Buildx

@@ -1,6 +1,6 @@
FROM node:18-bookworm as frontend-builder

RUN npm install --global --force yarn@1.22.19
RUN npm install --global --force yarn@1.22.22

# Controls whether to build the frontend assets
ARG skip_frontend_build
@@ -26,7 +26,7 @@ COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi

FROM python:3.8-slim-bookworm
FROM python:3.10-slim-bookworm

EXPOSE 5000

@@ -68,7 +68,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor -o /usr/share/keyrings/microsoft-prod.gpg \
&& curl https://packages.microsoft.com/config/debian/12/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql17 \
&& ACCEPT_EULA=Y apt-get install -y --no-install-recommends msodbcsql18 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl "$databricks_odbc_driver_url" --location --output /tmp/simba_odbc.zip \

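This Dockerfile hunk moves the backend image from python:3.8-slim-bookworm to python:3.10-slim-bookworm and the SQL Server driver from msodbcsql17 to msodbcsql18 (amd64 builds only). A hedged smoke test of a locally built image; the redash-local tag is a placeholder and odbcinst (shipped with unixODBC) is assumed to be present in the image:

```sh
# Sketch: build the image and verify the interpreter and ODBC driver registration.
docker build -t redash-local .
docker run --rm --entrypoint python redash-local --version     # expect Python 3.10.x
docker run --rm --entrypoint odbcinst redash-local -q -d       # expect an "ODBC Driver 18 for SQL Server" entry on amd64
```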

Makefile (18 changed lines)
@@ -1,4 +1,4 @@
.PHONY: compose_build up test_db create_database clean down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

compose_build: .env
COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build
@@ -17,7 +17,21 @@ create_database: .env
docker compose run server create_db

clean:
docker compose down && docker compose rm
docker compose down
docker compose --project-name cypress down
docker compose rm --stop --force
docker compose --project-name cypress rm --stop --force
docker image rm --force \
cypress-server:latest cypress-worker:latest cypress-scheduler:latest \
redash-server:latest redash-worker:latest redash-scheduler:latest
docker container prune --force
docker image prune --force
docker volume prune --force

clean-all: clean
docker image rm --force \
redash/redash:10.1.0.b50633 redis:7-alpine maildev/maildev:latest \
pgautoupgrade/pgautoupgrade:15-alpine3.8 pgautoupgrade/pgautoupgrade:latest

down:
docker compose down

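The Makefile hunk splits teardown into a heavier `clean` and a new `clean-all` target; usage stays a plain make invocation, sketched below against the recipes shown above:

```sh
# Tear down the dev and Cypress compose projects and prune leftover
# containers, images and volumes (the expanded `clean` recipe).
make clean

# Additionally remove the pulled base images (redash/redash, redis,
# maildev, pgautoupgrade) via the `clean-all: clean` dependency chain.
make clean-all
```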
@@ -84,6 +84,7 @@ Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help
- Python
- Qubole
- Rockset
- RisingWave
- Salesforce
- ScyllaDB
- Shell Scripts

Binary file not shown (Before: 2.4 KiB).

BIN client/app/assets/images/db-logos/risingwave.png (Normal file)
Binary file not shown (After: 9.7 KiB).
@@ -1,6 +1,5 @@
import React from "react";
import Link from "@/components/Link";
import { clientConfig, currentUser } from "@/services/auth";
import { clientConfig } from "@/services/auth";
import frontendVersion from "@/version.json";

export default function VersionInfo() {
@@ -10,15 +9,6 @@ export default function VersionInfo() {
Version: {clientConfig.version}
{frontendVersion !== clientConfig.version && ` (${frontendVersion.substring(0, 8)})`}
</div>
{clientConfig.newVersionAvailable && currentUser.hasPermission("super_admin") && (
<div className="m-t-10">
{/* eslint-disable react/jsx-no-target-blank */}
<Link href="https://version.redash.io/" className="update-available" target="_blank" rel="noopener">
Update Available <i className="fa fa-external-link m-l-5" aria-hidden="true" />
<span className="sr-only">(opens in a new tab)</span>
</Link>
</div>
)}
</React.Fragment>
);
}

@@ -1,79 +0,0 @@
import React, { useState } from "react";
import Card from "antd/lib/card";
import Button from "antd/lib/button";
import Typography from "antd/lib/typography";
import { clientConfig } from "@/services/auth";
import Link from "@/components/Link";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import OrgSettings from "@/services/organizationSettings";

const Text = Typography.Text;

function BeaconConsent() {
const [hide, setHide] = useState(false);

if (!clientConfig.showBeaconConsentMessage || hide) {
return null;
}

const hideConsentCard = () => {
clientConfig.showBeaconConsentMessage = false;
setHide(true);
};

const confirmConsent = confirm => {
let message = "🙏 Thank you.";

if (!confirm) {
message = "Settings Saved.";
}

OrgSettings.save({ beacon_consent: confirm }, message)
// .then(() => {
// // const settings = get(response, 'settings');
// // this.setState({ settings, formValues: { ...settings } });
// })
.finally(hideConsentCard);
};

return (
<DynamicComponent name="BeaconConsent">
<div className="m-t-10 tiled">
<Card
title={
<>
Would you be ok with sharing anonymous usage data with the Redash team?{" "}
<HelpTrigger type="USAGE_DATA_SHARING" />
</>
}
bordered={false}>
<Text>Help Redash improve by automatically sending anonymous usage data:</Text>
<div className="m-t-5">
<ul>
<li> Number of users, queries, dashboards, alerts, widgets and visualizations.</li>
<li> Types of data sources, alert destinations and visualizations.</li>
</ul>
</div>
<Text>All data is aggregated and will never include any sensitive or private data.</Text>
<div className="m-t-5">
<Button type="primary" className="m-r-5" onClick={() => confirmConsent(true)}>
Yes
</Button>
<Button type="default" onClick={() => confirmConsent(false)}>
No
</Button>
</div>
<div className="m-t-15">
<Text type="secondary">
You can change this setting anytime from the{" "}
<Link href="settings/organization">Organization Settings</Link> page.
</Text>
</div>
</Card>
</div>
</DynamicComponent>
);
}

export default BeaconConsent;

@@ -23,7 +23,6 @@ export const TYPES = mapValues(
VALUE_SOURCE_OPTIONS: ["/user-guide/querying/query-parameters#Value-Source-Options", "Guide: Value Source Options"],
SHARE_DASHBOARD: ["/user-guide/dashboards/sharing-dashboards", "Guide: Sharing and Embedding Dashboards"],
AUTHENTICATION_OPTIONS: ["/user-guide/users/authentication-options", "Guide: Authentication Options"],
USAGE_DATA_SHARING: ["/open-source/admin-guide/usage-data", "Help: Anonymous Usage Data Sharing"],
DS_ATHENA: ["/data-sources/amazon-athena-setup", "Guide: Help Setting up Amazon Athena"],
DS_BIGQUERY: ["/data-sources/bigquery-setup", "Guide: Help Setting up BigQuery"],
DS_URL: ["/data-sources/querying-urls", "Guide: Help Setting up URL"],

@@ -148,7 +148,9 @@ function EditVisualizationDialog({ dialog, visualization, query, queryResult })

function dismiss() {
const optionsChanged = !isEqual(options, defaultState.originalOptions);
confirmDialogClose(nameChanged || optionsChanged).then(dialog.dismiss);
confirmDialogClose(nameChanged || optionsChanged)
.then(dialog.dismiss)
.catch(() => {});
}

// When editing existing visualization chart type selector is disabled, so add only existing visualization's

@@ -5,7 +5,7 @@
<meta charset="UTF-8" />
<base href="{{base_href}}" />
<title><%= htmlWebpackPlugin.options.title %></title>
<script src="/static/unsupportedRedirect.js" async></script>
<script src="<%= htmlWebpackPlugin.options.staticPath %>unsupportedRedirect.js" async></script>

<link rel="icon" type="image/png" sizes="32x32" href="/static/images/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="96x96" href="/static/images/favicon-96x96.png" />

@@ -6,7 +6,6 @@ import Link from "@/components/Link";
import routeWithUserSession from "@/components/ApplicationArea/routeWithUserSession";
import EmptyState, { EmptyStateHelpMessage } from "@/components/empty-state/EmptyState";
import DynamicComponent from "@/components/DynamicComponent";
import BeaconConsent from "@/components/BeaconConsent";
import PlainButton from "@/components/PlainButton";

import { axios } from "@/services/axios";
@@ -89,7 +88,6 @@ export default function Home() {
</DynamicComponent>
<DynamicComponent name="HomeExtra" />
<DashboardAndQueryFavoritesList />
<BeaconConsent />
</div>
</div>
);

@@ -1,38 +0,0 @@
import React from "react";
import Form from "antd/lib/form";
import Checkbox from "antd/lib/checkbox";
import Skeleton from "antd/lib/skeleton";
import HelpTrigger from "@/components/HelpTrigger";
import DynamicComponent from "@/components/DynamicComponent";
import { SettingsEditorPropTypes, SettingsEditorDefaultProps } from "../prop-types";

export default function BeaconConsentSettings(props) {
const { values, onChange, loading } = props;

return (
<DynamicComponent name="OrganizationSettings.BeaconConsentSettings" {...props}>
<Form.Item
label={
<span>
Anonymous Usage Data Sharing
<HelpTrigger className="m-l-5 m-r-5" type="USAGE_DATA_SHARING" />
</span>
}>
{loading ? (
<Skeleton title={{ width: 300 }} paragraph={false} active />
) : (
<Checkbox
name="beacon_consent"
checked={values.beacon_consent}
onChange={e => onChange({ beacon_consent: e.target.checked })}>
Help Redash improve by automatically sending anonymous usage data
</Checkbox>
)}
</Form.Item>
</DynamicComponent>
);
}

BeaconConsentSettings.propTypes = SettingsEditorPropTypes;

BeaconConsentSettings.defaultProps = SettingsEditorDefaultProps;

@@ -4,7 +4,6 @@ import DynamicComponent from "@/components/DynamicComponent";
import FormatSettings from "./FormatSettings";
import PlotlySettings from "./PlotlySettings";
import FeatureFlagsSettings from "./FeatureFlagsSettings";
import BeaconConsentSettings from "./BeaconConsentSettings";

export default function GeneralSettings(props) {
return (
@@ -14,7 +13,6 @@ export default function GeneralSettings(props) {
<FormatSettings {...props} />
<PlotlySettings {...props} />
<FeatureFlagsSettings {...props} />
<BeaconConsentSettings {...props} />
</DynamicComponent>
);
}


compose.base.yaml (24 changed lines, Normal file)
@@ -0,0 +1,24 @@
services:
.redash:
build:
context: .
args:
FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
volumes:
- $PWD:${SERVER_MOUNT:-/ignore}
command: manage version
environment:
REDASH_LOG_LEVEL: INFO
REDASH_REDIS_URL: redis://redis:6379/0
REDASH_DATABASE_URL: postgresql://postgres@postgres/postgres
REDASH_RATELIMIT_ENABLED: false
REDASH_MAIL_DEFAULT_SENDER: redash@example.com
REDASH_MAIL_SERVER: email
REDASH_MAIL_PORT: 1025
REDASH_ENFORCE_CSRF: true
REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
REDASH_SECRET_KEY: ${REDASH_SECRET_KEY}
REDASH_PRODUCTION: ${REDASH_PRODUCTION:-true}
env_file:
- .env

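The new compose.base.yaml template declares FRONTEND_BUILD_MODE and INSTALL_GROUPS as build args with `${VAR:-default}` fallbacks, so they can be overridden from the caller's environment or from `.env` at build time. A hedged sketch; the override values are illustrative, not documented modes:

```sh
# Sketch: docker compose substitutes ${FRONTEND_BUILD_MODE:-2} and
# ${INSTALL_GROUPS:-main,all_ds,dev} before passing them as build args.
FRONTEND_BUILD_MODE=1 INSTALL_GROUPS=main,dev docker compose build
```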
@@ -52,7 +52,7 @@ services:
image: redis:7-alpine
restart: unless-stopped
postgres:
image: pgautoupgrade/pgautoupgrade:15-alpine3.8
image: pgautoupgrade/pgautoupgrade:latest
ports:
- "15432:5432"
# The following turns the DB into less durable, but gains significant performance improvements for the tests run (x3

@@ -1,6 +1,6 @@
{
"name": "redash-client",
"version": "24.03.0-dev",
"version": "24.07.0-dev",
"description": "The frontend part of Redash.",
"main": "index.js",
"scripts": {


poetry.lock (460 changed lines, generated)
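The lockfile header shows it is now generated by Poetry 1.8.2 instead of 1.7.1. A hedged sketch of re-locking with a pinned Poetry version; these are standard Poetry 1.8 commands, not anything taken from this repository:

```sh
# Sketch: regenerate poetry.lock with Poetry 1.8.x and verify consistency.
pipx install "poetry==1.8.2"      # or: pip install "poetry==1.8.2"
poetry lock --no-update           # re-lock without bumping resolved versions
poetry check --lock               # confirm poetry.lock matches pyproject.toml
```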
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.

[[package]]
name = "adal"
@@ -419,17 +419,6 @@ files = [
{file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"},
]

[[package]]
name = "boto"
version = "2.49.0"
description = "Amazon Web Services Library"
optional = false
python-versions = "*"
files = [
{file = "boto-2.49.0-py2.py3-none-any.whl", hash = "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8"},
{file = "boto-2.49.0.tar.gz", hash = "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a"},
]

[[package]]
name = "boto3"
version = "1.28.8"
@@ -891,20 +880,15 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
toml = ["tomli"]

[[package]]
name = "croniter"
version = "2.0.1"
description = "croniter provides iteration for datetime object with cron like format"
name = "crontab"
version = "1.0.1"
description = "Parse and use crontab schedules in Python"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
python-versions = "*"
files = [
{file = "croniter-2.0.1-py2.py3-none-any.whl", hash = "sha256:4cb064ce2d8f695b3b078be36ff50115cf8ac306c10a7e8653ee2a5b534673d7"},
{file = "croniter-2.0.1.tar.gz", hash = "sha256:d199b2ec3ea5e82988d1f72022433c5f9302b3b3ea9e6bfd6a1518f6ea5e700a"},
{file = "crontab-1.0.1.tar.gz", hash = "sha256:89477e3f93c81365e738d5ee2659509e6373bb2846de13922663e79aa74c6b91"},
]

[package.dependencies]
python-dateutil = "*"
pytz = ">2021.1"

[[package]]
name = "cryptography"
version = "41.0.6"
@@ -1041,22 +1025,23 @@ files = [

[[package]]
name = "dnspython"
version = "2.4.2"
version = "2.6.1"
description = "DNS toolkit"
optional = false
python-versions = ">=3.8,<4.0"
python-versions = ">=3.8"
files = [
{file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"},
{file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"},
{file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
{file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
]

[package.extras]
dnssec = ["cryptography (>=2.6,<42.0)"]
doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"]
doq = ["aioquic (>=0.9.20)"]
idna = ["idna (>=2.1,<4.0)"]
trio = ["trio (>=0.14,<0.23)"]
wmi = ["wmi (>=1.5.1,<2.0.0)"]
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
dnssec = ["cryptography (>=41)"]
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
doq = ["aioquic (>=0.9.25)"]
idna = ["idna (>=3.6)"]
trio = ["trio (>=0.23)"]
wmi = ["wmi (>=1.5.1)"]

[[package]]
name = "e6data-python-connector"
@@ -1729,22 +1714,23 @@ google-auth-oauthlib = ">=0.4.1"

[[package]]
name = "gunicorn"
version = "20.0.4"
version = "22.0.0"
description = "WSGI HTTP Server for UNIX"
optional = false
python-versions = ">=3.4"
python-versions = ">=3.7"
files = [
{file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"},
{file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"},
{file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"},
{file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"},
]

[package.dependencies]
setuptools = ">=3.0"
packaging = "*"

[package.extras]
eventlet = ["eventlet (>=0.9.7)"]
gevent = ["gevent (>=0.13)"]
eventlet = ["eventlet (>=0.24.1,!=0.36.0)"]
gevent = ["gevent (>=1.4.0)"]
setproctitle = ["setproctitle"]
testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"]
tornado = ["tornado (>=0.2)"]

[[package]]
@@ -1832,13 +1818,13 @@ license = ["ukkonen"]

[[package]]
name = "idna"
version = "3.6"
version = "3.7"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
]

[[package]]
@@ -1897,16 +1883,6 @@ thriftpy2 = {version = ">=0.4.0,<0.5.0", markers = "python_version >= \"3.0\""}
[package.extras]
kerberos = ["thrift_sasl (==0.2.1)"]

[[package]]
name = "inflection"
version = "0.3.1"
description = "A port of Ruby on Rails inflector to Python"
optional = false
python-versions = "*"
files = [
{file = "inflection-0.3.1.tar.gz", hash = "sha256:18ea7fb7a7d152853386523def08736aa8c32636b047ade55f7578c4edeb16ca"},
]

[[package]]
name = "influxdb"
version = "5.2.3"
@@ -2008,13 +1984,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]

[[package]]
name = "jinja2"
version = "3.1.3"
version = "3.1.4"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
{file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
{file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
{file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
]

[package.dependencies]
@@ -2057,17 +2033,18 @@ format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors

[[package]]
name = "jwcrypto"
version = "1.5.1"
version = "1.5.6"
description = "Implementation of JOSE Web standards"
optional = false
python-versions = ">= 3.6"
python-versions = ">= 3.8"
files = [
{file = "jwcrypto-1.5.1.tar.gz", hash = "sha256:48bb9bf433777136253579e52b75ffe0f9a4a721d133d01f45a0b91ed5f4f1ae"},
{file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"},
{file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"},
]

[package.dependencies]
cryptography = ">=3.4"
deprecated = "*"
typing-extensions = ">=4.5.0"

[[package]]
name = "ldap3"
@@ -2661,42 +2638,42 @@ et-xmlfile = "*"

[[package]]
name = "oracledb"
version = "2.0.1"
version = "2.1.2"
description = "Python interface to Oracle Database"
optional = false
python-versions = ">=3.7"
files = [
{file = "oracledb-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:deff10e6fd97cc6f6fa9bc94ebcc3875a38fbcfd1de5ce4c372658ff82d5037b"},
|
||||
{file = "oracledb-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b68a09ead904982f260bdb1c9f1565777f0d8893e95599a460068d2824d9a6"},
|
||||
{file = "oracledb-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f77566314fc2a2da6022d9bbb769c0d08724c6b321ce0bae73b627d65639cccc"},
|
||||
{file = "oracledb-2.0.1-cp310-cp310-win32.whl", hash = "sha256:b82d92c3c25550033bf41263f30fa9c775bd35b5e57d95b5d2a4d4ae83e456c6"},
|
||||
{file = "oracledb-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:608a7baf1adb952848e1534dab59aa3b1b7bf3771e6940d042b40826f2747aaf"},
|
||||
{file = "oracledb-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:903a51550ad339d475fd1cd77059157687608fc5da3af3641728c0baada5aa06"},
|
||||
{file = "oracledb-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:465389b3f9b54edcb364d66d848b048982beaaa4a24ff9a23b29582e5ac33dc9"},
|
||||
{file = "oracledb-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d504cc4ee4d2c511ca3185ef8ebf04a538d749fc30d4cc59843a6cff5d11df6"},
|
||||
{file = "oracledb-2.0.1-cp311-cp311-win32.whl", hash = "sha256:4b8a23bcd94a790f92680ae44f877ba8e01626c9deb41e9f788a1ef788ff2ca8"},
|
||||
{file = "oracledb-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:b301f68bf1f4e79e7b1b00dc070853b3256f00dea503f80ce7d22cba5f6559f4"},
|
||||
{file = "oracledb-2.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4fcd5f3127cd833939dfccc615c0ae328535feba3c66765d8308e2bbdecda5dd"},
|
||||
{file = "oracledb-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7cc2c51ed0657f6bda23002e7e407c4c971c9c5d90baa073d78b056e77672d"},
|
||||
{file = "oracledb-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3338e31fa946b84baf1ae675cb9ab242464dd072cbafc7e885a1b1130215fa9d"},
|
||||
{file = "oracledb-2.0.1-cp312-cp312-win32.whl", hash = "sha256:a64690c6efef0ebe04684ce893a116357b02c269e1ae88653165ac04bbcb5a8a"},
|
||||
{file = "oracledb-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:08827b11a8a5e6485b1f694714550c856db8f288b85f0e0c9a5a2bbbde1a92ae"},
|
||||
{file = "oracledb-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:73f90d6ea1a3b9b26457ecd380541fe97216867b5c757e3d66f047aa2c0e94e7"},
|
||||
{file = "oracledb-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3840ceecb83dc55c8e03d3f559eb0a640cf06a7584f30cb760ba580fdbfa3501"},
|
||||
{file = "oracledb-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:805d1d736ca137427521499f9a58ccfe981a1ccbb9be5cabc2e204dee7700790"},
|
||||
{file = "oracledb-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:1dd8a0f39cb62b33a6bba2b601435522ee42ad5a526db3d2f858b671bf87d956"},
|
||||
{file = "oracledb-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:45eaf9eb0d7c27ba9b1cfb09c86ffefeeffa82d90dc3ad910725623a11495e1e"},
|
||||
{file = "oracledb-2.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:b646b8bcacb1163a64646922965060131982641d704396585ed5f663329e3a0e"},
|
||||
{file = "oracledb-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b89b812e79ddfd670b0147717b3c1c8da1f2b967c5d2bc6dd8ce96f6ffef214"},
|
||||
{file = "oracledb-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf901522ee069fe6a44c32864b1b42016613b78f7acfddd2075d0d3f117ac6f7"},
|
||||
{file = "oracledb-2.0.1-cp38-cp38-win32.whl", hash = "sha256:bd3d2f8ec54d741b74aaa551d127d25a6bbd01351736dc2645ed0d2c6b3f828e"},
|
||||
{file = "oracledb-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:ba7c086d17852564c3e906ce5f285e97d3da261672febfc83e4a8d5882795ae8"},
|
||||
{file = "oracledb-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4f1d99ed802131197b21b011bdbf90f418ff7fac95daf2419c79dd6355c469b5"},
|
||||
{file = "oracledb-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b555d1da8ac6a0922e66127b1ed424f6814d9789b1551df0d6d37df8d006e79"},
|
||||
{file = "oracledb-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f05c7a5285c06a2acd55ed60aa24bd1adb931686f56684847672c9cef7dfec3"},
|
||||
{file = "oracledb-2.0.1-cp39-cp39-win32.whl", hash = "sha256:bb5dda68fb8ca7c817d2b70f3fdc38cfca1f635e35850ffb282f44c86885a22d"},
|
||||
{file = "oracledb-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:20c4bfff62df2198f99c5fc58c8df6ef934f5583875e5ab43bf6438b920f0cba"},
|
||||
{file = "oracledb-2.0.1.tar.gz", hash = "sha256:c12235a9eef123038184e57f3b9b145e149b22654e8242024cf4e81cd890f523"},
|
||||
{file = "oracledb-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ffaba9504c638c29129b484cf547accf750bd0f86df1ca6194646a4d2540691"},
|
||||
{file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d98deb1e3a500920f5460d457925f0c8cef8d037881fdbd16df1c4734453dd"},
|
||||
{file = "oracledb-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bde2df672704fbe12ab0653f6e808b1ed62de28c6864b17fc3a1fcac9c1fd472"},
|
||||
{file = "oracledb-2.1.2-cp310-cp310-win32.whl", hash = "sha256:3b3798a1220fc8736a37b9280d0ae4cdf263bb203fc6e2b3a82c33f9a2010702"},
|
||||
{file = "oracledb-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:92620efd5eb0d23b252d75f2f2ff1deadf25f44546903e3283760cb276d524ed"},
|
||||
{file = "oracledb-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b913a164e1830d0e955b88d97c5e4da4d2402f8a8b0d38febb6ad5a8ef9e4743"},
|
||||
{file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53827344c6d001f492aee0a3acb6c1b6c0f3030c2f5dc8cb86dc4f0bb4dd1ab"},
|
||||
{file = "oracledb-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50225074841d5f9b281d620c012ced4b0946ff5a941c8b639be7babda5190709"},
|
||||
{file = "oracledb-2.1.2-cp311-cp311-win32.whl", hash = "sha256:a043b4df2919411b787bcd24ffa4286249a11d05d29bb20bb076d108c3c6f777"},
|
||||
{file = "oracledb-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:9edce208c26ee018e43b75323888743031be3e9f0c0e4221abf037129c12d949"},
|
||||
{file = "oracledb-2.1.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:08aa313b801dda950918168d3962ba59a617adce143e0c2bf1ee9b847695faaa"},
|
||||
{file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de5c932b04d3bcdd22c71c0e5c5e1d16b6a3a2fc68dc472ee3a12e677461354c"},
|
||||
{file = "oracledb-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d590caf39b1901bcba394fcda9815438faff0afaf374025f89ef5d65993d0a4"},
|
||||
{file = "oracledb-2.1.2-cp312-cp312-win32.whl", hash = "sha256:1e3ffdfe76c97d1ca13a3fecf239c96d3889015bb5b775dc22b947108044b01e"},
|
||||
{file = "oracledb-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c1eaf8c74bb6de5772de768f2f3f5eb935ab935c633d3a012ddff7e691a2073"},
|
||||
{file = "oracledb-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2ee06e154e08cc5e4037855d74dc6e37dc054c91a7a1a372bb60d4442e2ed3d"},
|
||||
{file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a21d84aaf5dddab0cfa8ab7c23272c0295a5c796f212a4ce8a6b499643663dd"},
|
||||
{file = "oracledb-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b337f7cf30753c3a32302fbc25ca80d7ff5049dd9333e681236a674a90c21caf"},
|
||||
{file = "oracledb-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:b5d936763a9b26d32c4e460dbb346c2a962fcc98e6df33dd2d81fdc2eb26f1e4"},
|
||||
{file = "oracledb-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0ea32b87b7202811d85082f10bf7789747ce45f195be4199c5611e7d76a79e78"},
|
||||
{file = "oracledb-2.1.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:f94b22da87e051e3a8620d2b04d99e1cc9d9abb4da6736d6ae0ca436ba03fb86"},
|
||||
{file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:391034ee66717dba514e765263d08d18a2aa7badde373f82599b89e46fa3720a"},
|
||||
{file = "oracledb-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a2d9891244b9b94465e30af8cc79380bbb41081c5dc0511cbc94cc250e9e26d"},
|
||||
{file = "oracledb-2.1.2-cp38-cp38-win32.whl", hash = "sha256:9a9a6e0bf61952c2c82614b98fe896d2cda17d81ffca4527556e6607b10e3365"},
|
||||
{file = "oracledb-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:d9a6447589f203ca846526c99a667537b099d54ddeff09d24f9da59bdcc8f98b"},
|
||||
{file = "oracledb-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eb688dd1f8ea2038d17bc84fb651aa1e994b155d3cb8b8387df70ab2a7b4c4c"},
|
||||
{file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f22c31b894bb085a33d70e174c9bcd0abafc630c2c941ff0d630ee3852f1aa6"},
|
||||
{file = "oracledb-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5bc03520b8bd4dbf2ac4d937d298a85a7208ffbeec738eea92ad7bb00e7134a"},
|
||||
{file = "oracledb-2.1.2-cp39-cp39-win32.whl", hash = "sha256:5d4f6bd1036d7edbb96d8d31f0ca53696a013c00ac82fc19ac0ca374d2265b2c"},
|
||||
{file = "oracledb-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:69bde9770392c1c859b1e1d767dbb9ca4c57e3f2946ca90c779d9402a7e96111"},
|
||||
{file = "oracledb-2.1.2.tar.gz", hash = "sha256:3054bcc295d7378834ba7a5aceb865985e954915f9b07a843ea84c3824c6a0b2"},
|
||||
]

[package.dependencies]
@@ -3389,85 +3366,93 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

[[package]]
name = "pymongo"
version = "4.3.3"
version = "4.6.3"
description = "Python driver for MongoDB <http://www.mongodb.org>"
optional = false
python-versions = ">=3.7"
files = [
{file = "pymongo-4.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:74731c9e423c93cbe791f60c27030b6af6a948cef67deca079da6cd1bb583a8e"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:66413c50d510e5bcb0afc79880d1693a2185bcea003600ed898ada31338c004e"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9b87b23570565a6ddaa9244d87811c2ee9cffb02a753c8a2da9c077283d85845"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:695939036a320f4329ccf1627edefbbb67cc7892b8222d297b0dd2313742bfee"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:ffcc8394123ea8d43fff8e5d000095fe7741ce3f8988366c5c919c4f5eb179d3"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:943f208840777f34312c103a2d1caab02d780c4e9be26b3714acf6c4715ba7e1"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:01f7cbe88d22440b6594c955e37312d932fd632ffed1a86d0c361503ca82cc9d"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdb87309de97c63cb9a69132e1cb16be470e58cffdfbad68fdd1dc292b22a840"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d86c35d94b5499689354ccbc48438a79f449481ee6300f3e905748edceed78e7"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a966d5304b7d90c45c404914e06bbf02c5bf7e99685c6c12f0047ef2aa837142"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be1d2ce7e269215c3ee9a215e296b7a744aff4f39233486d2c4d77f5f0c561a6"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b6163dac53ef1e5d834297810c178050bd0548a4136cd4e0f56402185916ca"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-win32.whl", hash = "sha256:dc0cff74cd36d7e1edba91baa09622c35a8a57025f2f2b7a41e3f83b1db73186"},
|
||||
{file = "pymongo-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:cafa52873ae12baa512a8721afc20de67a36886baae6a5f394ddef0ce9391f91"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:599d3f6fbef31933b96e2d906b0f169b3371ff79ea6aaf6ecd76c947a3508a3d"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0640b4e9d008e13956b004d1971a23377b3d45491f87082161c92efb1e6c0d6"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:341221e2f2866a5960e6f8610f4cbac0bb13097f3b1a289aa55aba984fc0d969"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7fac06a539daef4fcf5d8288d0d21b412f9b750454cd5a3cf90484665db442a"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a51901066696c4af38c6c63a1f0aeffd5e282367ff475de8c191ec9609b56d"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3055510fdfdb1775bc8baa359783022f70bb553f2d46e153c094dfcb08578ff"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-win32.whl", hash = "sha256:524d78673518dcd352a91541ecd2839c65af92dc883321c2109ef6e5cd22ef23"},
|
||||
{file = "pymongo-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b8a03af1ce79b902a43f5f694c4ca8d92c2a4195db0966f08f266549e2fc49bc"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:39b03045c71f761aee96a12ebfbc2f4be89e724ff6f5e31c2574c1a0e2add8bd"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6fcfbf435eebf8a1765c6d1f46821740ebe9f54f815a05c8fc30d789ef43cb12"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:7d43ac9c7eeda5100fb0a7152fab7099c9cf9e5abd3bb36928eb98c7d7a339c6"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3b93043b14ba7eb08c57afca19751658ece1cfa2f0b7b1fb5c7a41452fbb8482"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c09956606c08c4a7c6178a04ba2dd9388fcc5db32002ade9c9bc865ab156ab6d"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:b0cfe925610f2fd59555bb7fc37bd739e4b197d33f2a8b2fae7b9c0c6640318c"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:4d00b91c77ceb064c9b0459f0d6ea5bfdbc53ea9e17cf75731e151ef25a830c7"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:c6258a3663780ae47ba73d43eb63c79c40ffddfb764e09b56df33be2f9479837"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e758f0e734e1e90357ae01ec9c6daf19ff60a051192fe110d8fb25c62600e"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f3621a46cdc7a9ba8080422262398a91762a581d27e0647746588d3f995c88"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47f7aa217b25833cd6f0e72b0d224be55393c2692b4f5e0561cb3beeb10296e9"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2fdc855149efe7cdcc2a01ca02bfa24761c640203ea94df467f3baf19078be"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5effd87c7d363890259eac16c56a4e8da307286012c076223997f8cc4a8c435b"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dd1cf2995fdbd64fc0802313e8323f5fa18994d51af059b5b8862b73b5e53f0"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bb869707d8e30645ed6766e44098600ca6cdf7989c22a3ea2b7966bb1d98d4b2"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-win32.whl", hash = "sha256:49210feb0be8051a64d71691f0acbfbedc33e149f0a5d6e271fddf6a12493fed"},
|
||||
{file = "pymongo-4.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:54c377893f2cbbffe39abcff5ff2e917b082c364521fa079305f6f064e1a24a9"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c184ec5be465c0319440734491e1aa4709b5f3ba75fdfc9dbbc2ae715a7f6829"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:dca34367a4e77fcab0693e603a959878eaf2351585e7d752cac544bc6b2dee46"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd6a4afb20fb3c26a7bfd4611a0bbb24d93cbd746f5eb881f114b5e38fd55501"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0c466710871d0026c190fc4141e810cf9d9affbf4935e1d273fbdc7d7cda6143"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:d07d06dba5b5f7d80f9cc45501456e440f759fe79f9895922ed486237ac378a8"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:711bc52cb98e7892c03e9b669bebd89c0a890a90dbc6d5bb2c47f30239bac6e9"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:34b040e095e1671df0c095ec0b04fc4ebb19c4c160f87c2b55c079b16b1a6b00"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4ed00f96e147f40b565fe7530d1da0b0f3ab803d5dd5b683834500fa5d195ec4"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef888f48eb9203ee1e04b9fb27429017b290fb916f1e7826c2f7808c88798394"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:316498b642c00401370b2156b5233b256f9b33799e0a8d9d0b8a7da217a20fca"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa7e202feb683dad74f00dea066690448d0cfa310f8a277db06ec8eb466601b5"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52896e22115c97f1c829db32aa2760b0d61839cfe08b168c2b1d82f31dbc5f55"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c051fe37c96b9878f37fa58906cb53ecd13dcb7341d3a85f1e2e2f6b10782d9"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5134d33286c045393c7beb51be29754647cec5ebc051cf82799c5ce9820a2ca2"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a9c2885b4a8e6e39db5662d8b02ca6dcec796a45e48c2de12552841f061692ba"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-win32.whl", hash = "sha256:a6cd6f1db75eb07332bd3710f58f5fce4967eadbf751bad653842750a61bda62"},
|
||||
{file = "pymongo-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:d5571b6978750601f783cea07fb6b666837010ca57e5cefa389c1d456f6222e2"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:81d1a7303bd02ca1c5be4aacd4db73593f573ba8e0c543c04c6da6275fd7a47e"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:016c412118e1c23fef3a1eada4f83ae6e8844fd91986b2e066fc1b0013cdd9ae"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8fd6e191b92a10310f5a6cfe10d6f839d79d192fb02480bda325286bd1c7b385"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2961b05f9c04a53da8bfc72f1910b6aec7205fcf3ac9c036d24619979bbee4b"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b38a96b3eed8edc515b38257f03216f382c4389d022a8834667e2bc63c0c0c31"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:c1a70c51da9fa95bd75c167edb2eb3f3c4d27bc4ddd29e588f21649d014ec0b7"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8a06a0c02f5606330e8f2e2f3b7949877ca7e4024fa2bff5a4506bec66c49ec7"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:6c2216d8b6a6d019c6f4b1ad55f890e5e77eb089309ffc05b6911c09349e7474"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac0a143ef4f28f49670bf89cb15847eb80b375d55eba401ca2f777cd425f338"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08fc250b5552ee97ceeae0f52d8b04f360291285fc7437f13daa516ce38fdbc6"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704d939656e21b073bfcddd7228b29e0e8a93dd27b54240eaafc0b9a631629a6"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074f1a6f23e28b983c96142f2d45be03ec55d93035b471c26889a7ad2365db3"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b16250238de8dafca225647608dddc7bbb5dce3dd53b4d8e63c1cc287394c2f"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7761cacb8745093062695b11574effea69db636c2fd0a9269a1f0183712927b4"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd7bb378d82b88387dc10227cfd964f6273eb083e05299e9b97cbe075da12d11"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-win32.whl", hash = "sha256:dc24d245026a72d9b4953729d31813edd4bd4e5c13622d96e27c284942d33f24"},
|
||||
{file = "pymongo-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:fc28e8d85d392a06434e9a934908d97e2cf453d69488d2bcd0bfb881497fd975"},
|
||||
{file = "pymongo-4.3.3.tar.gz", hash = "sha256:34e95ffb0a68bffbc3b437f2d1f25fc916fef3df5cdeed0992da5f42fae9b807"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e344d0afdd7c06c1f1e66a4736593293f432defc2191e6b411fc9c82fa8c5adc"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux1_i686.whl", hash = "sha256:731a92dfc4022db763bfa835c6bd160f2d2cba6ada75749c2ed500e13983414b"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:c4726e36a2f7e92f09f5b8e92ba4db7525daffe31a0dcbcf0533edc0ade8c7d8"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:00e6cfce111883ca63a3c12878286e0b89871f4b840290e61fb6f88ee0e687be"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:cc7a26edf79015c58eea46feb5b262cece55bc1d4929a8a9e0cbe7e6d6a9b0eb"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:4955be64d943b30f2a7ff98d818ca530f7cb37450bc6b32c37e0e74821907ef8"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:af039afc6d787502c02089759778b550cb2f25dbe2780f5b050a2e37031c3fbf"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc15a7c7a99aed7d0831eaf78a607f1db0c7a255f96e3d18984231acd72f70c"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e97c138d811e9367723fcd07c4402a9211caae20479fdd6301d57762778a69f"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebcc145c74d06296ce0cad35992185064e5cb2aadef719586778c144f0cd4d37"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:664c64b6bdb31aceb80f0556951e5e2bf50d359270732268b4e7af00a1cf5d6c"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4056bc421d4df2c61db4e584415f2b0f1eebb92cbf9222f7f38303467c37117"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-win32.whl", hash = "sha256:cdbea2aac1a4caa66ee912af3601557d2bda2f9f69feec83601c78c7e53ece64"},
|
||||
{file = "pymongo-4.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:6cec7279e5a1b74b257d0270a8c97943d745811066630a6bc6beb413c68c6a33"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:138b9fa18d40401c217bc038a48bcde4160b02d36d8632015b1804971a2eaa2f"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60931b0e07448afe8866ffff764cd5bf4b1a855dc84c7dcb3974c6aa6a377a59"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b35f8bded43ff91475305445fedf0613f880ff7e25c75ae1028e1260a9b7a86"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:872bad5c83f7eec9da11e1fef5f858c6a4c79fe4a83c7780e7b0fe95d560ae3f"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ad3e5bfcd345c0bfe9af69a82d720860b5b043c1657ffb513c18a0dee19c19"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e208f2ab7b495eff8fd175022abfb0abce6307ac5aee3f4de51fc1a459b71c9"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-win32.whl", hash = "sha256:4670edbb5ddd71a4d555668ef99b032a5f81b59e4145d66123aa0d831eac7883"},
|
||||
{file = "pymongo-4.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:1c2761302b6cbfd12e239ce1b8061d4cf424a361d199dcb32da534985cae9350"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:722f2b709b63311c0efda4fa4c603661faa4bec6bad24a6cc41a3bc6d841bf09"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:994386a4d6ad39e18bcede6dc8d1d693ec3ed897b88f86b1841fbc37227406da"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:391aea047bba928006114282f175bc8d09c53fe1b7d8920bf888325e229302fe"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4330c022024e7994b630199cdae909123e4b0e9cf15335de71b146c0f6a2435"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01277a7e183c59081368e4efbde2b8f577014431b257959ca98d3a4e8682dd51"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d30d5d7963453b478016bf7b0d87d7089ca24d93dbdecfbc9aa32f1b4772160a"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-win32.whl", hash = "sha256:a023804a3ac0f85d4510265b60978522368b5815772262e61e3a2222a8b315c9"},
|
||||
{file = "pymongo-4.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:2a6ae9a600bbc2dbff719c98bf5da584fb8a4f2bb23729a09be2e9c3dbc61c8a"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:3b909e5b1864de01510079b39bbdc480720c37747be5552b354bc73f02c24a3c"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:48c60bd32ec141c0d45d8471179430003d9fb4490da181b8165fb1dce9cc255c"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:36d7049fc183fe4edda3eae7f66ea14c660921429e082fe90b4b7f4dc6664a70"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e5c161b18660f1c9d1f78236de45520a436be65e42b7bb51f25f74ad22bdde"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:e458e6fc2b7dd40d15cda04898bd2d8c9ff7ae086c516bc261628d54eb4e3158"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:e420e74c6db4594a6d09f39b58c0772679006cb0b4fc40901ba608794d87dad2"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:9c9340c7161e112e36ebb97fbba1cdbe7db3dfacb694d2918b1f155a01f3d859"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:26d036e0f5de09d0b21d0fc30314fcf2ae6359e4d43ae109aa6cf27b4ce02d30"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cf28d9c90e40d4e385b858e4095739829f466f23e08674085161d86bb4bb10"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9066dff9dc0a182478ca5885d0b8a2b820b462e19459ada109df7a3ced31b272"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1e1586ebdebe0447a24842480defac17c496430a218486c96e2da3f164c0f05"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3853fb66bf34ce1b6e573e1bbb3cb28763be9d1f57758535757faf1ab2f24a"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:462684a6f5ce6f2661c30eab4d1d459231e0eed280f338e716e31a24fc09ccb3"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a4ea44e5a913bdb7c9abd34c69e9fcfac10dfaf49765463e0dc1ea922dd2a9d"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:098d420a8214ad25f872de7e8b309441995d12ece0376218a04d9ed5d2222cf3"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:7330245253fbe2e09845069d2f4d35dd27f63e377034c94cb0ddac18bc8b0d82"},
|
||||
{file = "pymongo-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:151361c101600a85cb1c1e0db4e4b28318b521fcafa9b62d389f7342faaaee80"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:4d167d546352869125dc86f6fda6dffc627d8a9c8963eaee665825f2520d542b"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:eaf3d594ebfd5e1f3503d81e06a5d78e33cda27418b36c2491c3d4ad4fca5972"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ee79e02a7c5ed34706ecb5dad19e6c7d267cf86d28c075ef3127c58f3081279"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af5c5112db04cf62a5d9d224a24f289aaecb47d152c08a457cca81cee061d5bd"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6b5aec78aa4840e8d6c3881900259892ab5733a366696ca10d99d68c3d73eaaf"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:9757602fb45c8ecc1883fe6db7c59c19d87eb3c645ec9342d28a6026837da931"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:dde9fb6e105ce054339256a8b7a9775212ebb29596ef4e402d7bbc63b354d202"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:7df8b166d3db6cfead4cf55b481408d8f0935d8bd8d6dbf64507c49ef82c7200"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53451190b8628e1ce7d1fe105dc376c3f10705127bd3b51fe3e107b9ff1851e6"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75107a386d4ccf5291e75cce8ca3898430e7907f4cc1208a17c9efad33a1ea84"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4a0660ce32d8459b7f12dc3ca0141528fead62d3cce31b548f96f30902074cc0"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa310096450e9c461b7dfd66cbc1c41771fe36c06200440bb3e062b1d4a06b6e"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f465cca9b178e7bb782f952dd58e9e92f8ba056e585959465f2bb50feddef5f"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c67c19f653053ef2ebd7f1837c2978400058d6d7f66ec5760373a21eaf660158"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c701de8e483fb5e53874aab642235361aac6de698146b02c644389eaa8c137b6"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-win32.whl", hash = "sha256:90525454546536544307e6da9c81f331a71a1b144e2d038fec587cc9f9250285"},
|
||||
{file = "pymongo-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:3e1ba5a037c526a3f4060c28f8d45d71ed9626e2bf954b0cd9a8dcc3b45172ee"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14a82593528cddc93cfea5ee78fac95ae763a3a4e124ca79ee0b24fbbc6da1c9"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cd6c15242d9306ff1748681c3235284cbe9f807aeaa86cd17d85e72af626e9a7"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6de33f1b2eed91b802ec7abeb92ffb981d052f3604b45588309aae9e0f6e3c02"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0182899aafe830f25cf96c5976d724efeaaf7b6646c15424ad8dd25422b2efe1"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:8d0ea740a2faa56f930dc82c5976d96c017ece26b29a1cddafb58721c7aab960"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:5c8a4982f5eb767c6fbfb8fb378683d09bcab7c3251ba64357eef600d43f6c23"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:becfa816545a48c8e740ac2fd624c1c121e1362072d68ffcf37a6b1be8ea187e"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ff7d1f449fcad23d9bc8e8dc2b9972be38bcd76d99ea5f7d29b2efa929c2a7ff"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e097f877de4d6af13a33ef938bf2a2350f424be5deabf8b857da95f5b080487a"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:705a9bfd619301ee7e985d6f91f68b15dfcb2f6f36b8cc225cc82d4260d2bce5"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ef1b4992ee1cb8bb16745e70afa0c02c5360220a7a8bb4775888721f052d0a6"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d10bdd46cbc35a2109737d36ffbef32e7420569a87904738ad444ccb7ac2c5"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17c1c143ba77d6e21fc8b48e93f0a5ed982a23447434e9ee4fbb6d633402506b"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e51e30d67b468a2a634ade928b30cb3e420127f148a9aec60de33f39087bdc4"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bec8e4e88984be157408f1923d25869e1b575c07711cdbdde596f66931800934"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-win32.whl", hash = "sha256:98877a9c4ad42df8253a12d8d17a3265781d1feb5c91c767bd153f88feb0b670"},
|
||||
{file = "pymongo-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:6d5b35da9e16cda630baed790ffc3d0d01029d269523a7cec34d2ec7e6823e75"},
|
||||
{file = "pymongo-4.6.3.tar.gz", hash = "sha256:400074090b9a631f120b42c61b222fd743490c133a5d2f99c0208cefcccc964e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3475,10 +3460,11 @@ dnspython = ">=1.16.0,<3.0.0"
|
||||
|
||||
[package.extras]
|
||||
aws = ["pymongo-auth-aws (<2.0.0)"]
|
||||
encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.3.0,<2.0.0)"]
|
||||
gssapi = ["pykerberos"]
|
||||
ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
|
||||
encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"]
|
||||
gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
|
||||
ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
|
||||
snappy = ["python-snappy"]
|
||||
test = ["pytest (>=7)"]
|
||||
zstd = ["zstandard"]
|
||||
|
||||
[[package]]
|
||||
@@ -3550,24 +3536,42 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pyodbc"
|
||||
version = "4.0.28"
|
||||
description = "DB API Module for ODBC"
|
||||
version = "5.1.0"
|
||||
description = "DB API module for ODBC"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pyodbc-4.0.28-cp27-cp27m-win32.whl", hash = "sha256:2217eb01091a207a9ffa457c49a63a1d0eb8514c810a23b901518348422fcf65"},
|
||||
{file = "pyodbc-4.0.28-cp27-cp27m-win_amd64.whl", hash = "sha256:ae35c455bfbadc631ee20df6657bfda0779bdc80badfd9d13741433dd78785e6"},
|
||||
{file = "pyodbc-4.0.28-cp27-none-macosx_10_15_x86_64.whl", hash = "sha256:f37f26ae909101465a085ef51b9dde35afc93b7c7e38c25b61b124b110aa9998"},
|
||||
{file = "pyodbc-4.0.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5d1abca8f5bdab1515e300d05c63c25d072a123c7089a554290b5b9e83168eb6"},
|
||||
{file = "pyodbc-4.0.28-cp36-cp36m-win32.whl", hash = "sha256:c25e525e0576b1dfa067d3a6530e046a24006d89715026d2d5dbf6d4290093b9"},
|
||||
{file = "pyodbc-4.0.28-cp36-cp36m-win_amd64.whl", hash = "sha256:259b2554d2b8c9a6247871fec741b526f0b63a0e42676bd8f210e214a3015129"},
|
||||
{file = "pyodbc-4.0.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ad9aa2a851242109141e4275c2a9b4d4379e00288959acd877501ee90aa3955"},
|
||||
{file = "pyodbc-4.0.28-cp37-cp37m-win32.whl", hash = "sha256:2908f73e5a374437fd7a38f14b09f2b96d742235bf2f819fb697f8922e35ddda"},
|
||||
{file = "pyodbc-4.0.28-cp37-cp37m-win_amd64.whl", hash = "sha256:a1a1687edef4319ae533e1d789c6c8241459f04af9e4db76e6e4045c530239de"},
|
||||
{file = "pyodbc-4.0.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4f3c788d231907f45ea329cd245b398b165d9d28809f55814240eea775a6b1cd"},
|
||||
{file = "pyodbc-4.0.28-cp38-cp38-win32.whl", hash = "sha256:93e495c51a5db027c2f7ee2c2c3fe9d6ea86b3a61392c7c8961a1818951868c8"},
|
||||
{file = "pyodbc-4.0.28-cp38-cp38-win_amd64.whl", hash = "sha256:49ba851be2d9d07cc1472b43febc93e3362c1e09ceb3eac84693a6690d090165"},
|
||||
{file = "pyodbc-4.0.28.tar.gz", hash = "sha256:510643354c4c687ed96bf7e7cec4d02d6c626ecf3e18696f5a0228dd6d11b769"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02fe9821711a2d14415eaeb4deab471d2c8b7034b107e524e414c0e133c42248"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2cbdbd019756285dc44bc35238a3ed8dfaa454e8c8b2c3462f1710cfeebfb290"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84df3bbce9bafe65abd25788d55c9f1da304f6115d70f25758ff8c85f3ce0517"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:218bb75d4bc67075529a65ce8ec7daeed1d83c33dd7410450fbf68d43d184d28"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-win32.whl", hash = "sha256:eae576b3b67d21d6f237e18bb5f3df8323a2258f52c3e3afeef79269704072a9"},
|
||||
{file = "pyodbc-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3b65343557f4c7753204e06f4c82c97ed212a636501f4bc27c5ce0e549eb3e8"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa6f46377da303bf79bcb4b559899507df4b2559f30dcfdf191358ee4b99f3ab"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b19d7f44cfee89901e482f554a88177e83fae76b03c3f830e0023a195d840220"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c36448322f8d6479d87c528cf52401a6ea4f509b9637750b67340382b4e1b40"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e0cb79222aad4b31a3602e39b242683c29c6221a16ed43f45f18fd0b73659"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-win32.whl", hash = "sha256:92caed9d445815ed3f7e5a1249e29a4600ebc1e99404df81b6ed7671074c9227"},
|
||||
{file = "pyodbc-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1bd14633e91b7a9814f4fd944c9ebb89fb7f1fd4710c4e3999b5ef041536347"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d3d9cc4af703c4817b6e604315910b0cf5dcb68056d52b25ca072dd59c52dcbc"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:406b8fa2133a7b6a713aa5187dba2d08cf763b5884606bed77610a7660fdfabe"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8488c3818f12207650836c5c6f7352f9ff9f56a05a05512145995e497c0bbb1"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0df69e3a500791b70b5748c68a79483b24428e4c16027b56aa0305e95c143a4"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-win32.whl", hash = "sha256:aa4e02d3a9bf819394510b726b25f1566f8b3f0891ca400ad2d4c8b86b535b78"},
|
||||
{file = "pyodbc-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:33f4984af38872e7bdec78007a34e4d43ae72bf9d0bae3344e79d9d0db157c0e"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29425e2d366e7f5828b76c7993f412a3db4f18bd5bcee00186c00b5a5965e205"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2bbd2e75c77dee9f3cd100c3246110abaeb9af3f7fa304ccc2934ff9c6a4fa4"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3602136a936bc0c1bb9722eb2fbf2042b3ff1ddccdc4688e514b82d4b831563b"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed1c843565d3a4fd8c332ebceaf33efe817657a0505eacb97dd1b786a985b0b"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-win32.whl", hash = "sha256:735f6da3762e5856b5580be0ed96bb946948346ebd1e526d5169a5513626a67a"},
|
||||
{file = "pyodbc-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c5bb4e43f6c72f5fa2c634570e0d761767d8ea49f39205229b812fb4d3fe05aa"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33f0f1d7764cefef6f787936bd6359670828a6086be67518ab951f1f7f503cda"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be3b1c36c31ec7d73d0b34a8ad8743573763fadd8f2bceef1e84408252b48dce"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e71a51c252b503b4d753e21ed31e640015fc0d00202d42ea42f2396fcc924b4a"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5282cc8b667af97d76f4955250619a53f25486cbb6b1f45a06b781006ffa0b"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-win32.whl", hash = "sha256:96b2a8dc27693a517e3aad3944a7faa8be95d40d7ec1eda51a1885162eedfa33"},
|
||||
{file = "pyodbc-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:e738c5eedb4a0cbab20cc008882f49b106054499db56864057c2530ff208cf32"},
|
||||
{file = "pyodbc-5.1.0.tar.gz", hash = "sha256:397feee44561a6580be08cedbe986436859563f4bb378f48224655c8e987ea60"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3839,6 +3843,7 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
|
||||
@@ -3873,23 +3878,6 @@ files = [
|
||||
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "qds-sdk"
|
||||
version = "1.16.1"
|
||||
description = "Python SDK for coding to the Qubole Data Service API"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "qds_sdk-1.16.1.tar.gz", hash = "sha256:28850682afcf3ab0f2a74a9fd442715519db3ee2ba91c7ecb0b1a56773748ffd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
boto = ">=2.45.0"
|
||||
inflection = "0.3.1"
|
||||
requests = ">=1.0.3"
|
||||
six = ">=1.2.0"
|
||||
urllib3 = ">=1.0.2"
|
||||
|
||||
[[package]]
|
||||
name = "rdflib"
|
||||
version = "6.3.2"
|
||||
@@ -4042,13 +4030,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.31.0"
|
||||
version = "2.32.0"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
|
||||
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
|
||||
{file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"},
|
||||
{file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4161,32 +4149,33 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
|
||||
|
||||
[[package]]
|
||||
name = "rq"
|
||||
version = "1.9.0"
|
||||
version = "1.16.1"
|
||||
description = "RQ is a simple, lightweight, library for creating background jobs, and processing them."
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "rq-1.9.0-py2.py3-none-any.whl", hash = "sha256:7af1e9706dbe6f1eac16dffacd8271ec27c1369950941f14dab6bb08a62979d7"},
|
||||
{file = "rq-1.9.0.tar.gz", hash = "sha256:bdfef943de838955e474cfd0e25b9b8c53ed4b9c361fe4bb11cf56d17a87acc5"},
|
||||
{file = "rq-1.16.1-py3-none-any.whl", hash = "sha256:273de33f10bb9f18cd1e8ccc0a4e8dba2b8eb86a6ab2a91ae674f99bd68025f1"},
|
||||
{file = "rq-1.16.1.tar.gz", hash = "sha256:d9a6314bc759a743b4a5d89aa467eaa3a31dbbc0a34bcd0ee82e8852d9ec166d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=5.0.0"
|
||||
redis = ">=3.5.0"
|
||||
click = ">=5"
|
||||
redis = ">=3.5"
|
||||
|
||||
[[package]]
|
||||
name = "rq-scheduler"
|
||||
version = "0.11.0"
|
||||
version = "0.13.1"
|
||||
description = "Provides job scheduling capabilities to RQ (Redis Queue)"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "rq-scheduler-0.11.0.tar.gz", hash = "sha256:db79bb56cdbc4f7ffdd8bd659e389e91aa0db9c1abf002dc46f5dd6f0dbd2910"},
|
||||
{file = "rq_scheduler-0.11.0-py2.py3-none-any.whl", hash = "sha256:da94e9b6badf112995ff38fe16192e4f4c43c412b3c9614684ed8c8f7ca517d2"},
|
||||
{file = "rq-scheduler-0.13.1.tar.gz", hash = "sha256:89d6a18f215536362b22c0548db7dbb8678bc520c18dc18a82fd0bb2b91695ce"},
|
||||
{file = "rq_scheduler-0.13.1-py2.py3-none-any.whl", hash = "sha256:c2b19c3aedfc7de4d405183c98aa327506e423bf4cdc556af55aaab9bbe5d1a1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
croniter = ">=0.3.9"
|
||||
crontab = ">=0.23.0"
|
||||
freezegun = "*"
|
||||
python-dateutil = "*"
|
||||
rq = ">=0.13"
|
||||
|
||||
@@ -4622,49 +4611,46 @@ test = ["flake8 (>=2.4.0)", "isort (>=3.9.6)", "psycopg2 (>=2.4.6)", "pytest (>=
|
||||
|
||||
[[package]]
|
||||
name = "sqlalchemy-utils"
|
||||
version = "0.34.2"
|
||||
version = "0.38.3"
|
||||
description = "Various utility functions for SQLAlchemy."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
python-versions = "~=3.6"
|
||||
files = [
|
||||
{file = "SQLAlchemy-Utils-0.34.2.tar.gz", hash = "sha256:6689b29d7951c5c7c4d79fa6b8c95f9ff9ec708b07aa53f82060599bd14dcc88"},
|
||||
{file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"},
|
||||
{file = "SQLAlchemy_Utils-0.38.3-py3-none-any.whl", hash = "sha256:5c13b5d08adfaa85f3d4e8ec09a75136216fad41346980d02974a70a77988bf9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = "*"
|
||||
SQLAlchemy = ">=1.0"
|
||||
SQLAlchemy = ">=1.3"
|
||||
|
||||
[package.extras]
|
||||
anyjson = ["anyjson (>=0.3.3)"]
|
||||
arrow = ["arrow (>=0.3.4)"]
|
||||
babel = ["Babel (>=1.3)"]
|
||||
color = ["colour (>=0.0.4)"]
|
||||
encrypted = ["cryptography (>=0.6)"]
|
||||
enum = ["enum34"]
|
||||
intervals = ["intervals (>=0.7.1)"]
|
||||
ipaddress = ["ipaddr"]
|
||||
password = ["passlib (>=1.6,<2.0)"]
|
||||
pendulum = ["pendulum (>=2.0.5)"]
|
||||
phone = ["phonenumbers (>=5.9.2)"]
|
||||
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "mock (==2.0.0)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
|
||||
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "anyjson (>=0.3.3)", "arrow (>=0.3.4)", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "enum34", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "ipaddr", "isort (>=4.2.2)", "mock (==2.0.0)", "passlib (>=1.6,<2.0)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
|
||||
test = ["Jinja2 (>=2.3)", "Pygments (>=1.2)", "backports.zoneinfo", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "isort (>=4.2.2)", "pg8000 (>=1.12.4)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
|
||||
test-all = ["Babel (>=1.3)", "Jinja2 (>=2.3)", "Pygments (>=1.2)", "arrow (>=0.3.4)", "backports.zoneinfo", "colour (>=0.0.4)", "cryptography (>=0.6)", "docutils (>=0.10)", "flake8 (>=2.4.0)", "flexmock (>=0.9.7)", "furl (>=0.4.1)", "intervals (>=0.7.1)", "isort (>=4.2.2)", "passlib (>=1.6,<2.0)", "pendulum (>=2.0.5)", "pg8000 (>=1.12.4)", "phonenumbers (>=5.9.2)", "psycopg2 (>=2.5.1)", "psycopg2cffi (>=2.8.1)", "pymysql", "pyodbc", "pytest (>=2.7.1)", "python-dateutil", "python-dateutil (>=2.6)", "pytz (>=2014.2)"]
|
||||
timezone = ["python-dateutil"]
|
||||
url = ["furl (>=0.4.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "sqlparse"
|
||||
version = "0.4.4"
|
||||
version = "0.5.0"
|
||||
description = "A non-validating SQL parser."
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
|
||||
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
|
||||
{file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"},
|
||||
{file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["build", "flake8"]
|
||||
dev = ["build", "hatch"]
|
||||
doc = ["sphinx"]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "sshtunnel"
|
||||
@@ -4925,13 +4911,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.18"
|
||||
version = "1.26.19"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
files = [
|
||||
{file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
|
||||
{file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
|
||||
{file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
|
||||
{file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -5331,4 +5317,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.8,<3.11"
|
||||
content-hash = "e8bd51251218101afab4a1081b554c4c5d429b8e92a8193f807712d85bb5a0bf"
|
||||
content-hash = "9ab7b118403a9004893f4d955cf276fe0f231c2c2fa1f327dbffd7c81d2f4dd7"
|
||||
|
||||
@@ -12,7 +12,7 @@ force-exclude = '''
|
||||
|
||||
[tool.poetry]
|
||||
name = "redash"
|
||||
version = "24.03.0-dev"
|
||||
version = "24.07.0-dev"
|
||||
description = "Make Your Company Data Driven. Connect to any data source, easily visualize, dashboard and share your data."
|
||||
authors = ["Arik Fraimovich <arik@redash.io>"]
|
||||
# to be added to/removed from the mailing list, please reach out to Arik via the above email or Discord
|
||||
@@ -43,10 +43,10 @@ flask-wtf = "1.1.1"
funcy = "1.13"
gevent = "23.9.1"
greenlet = "2.0.2"
gunicorn = "20.0.4"
gunicorn = "22.0.0"
httplib2 = "0.19.0"
itsdangerous = "2.1.2"
jinja2 = "3.1.3"
jinja2 = "3.1.4"
jsonschema = "3.1.1"
markupsafe = "2.1.1"
maxminddb-geolite2 = "2018.703"
@@ -64,27 +64,28 @@ pytz = ">=2019.3"
pyyaml = "6.0.1"
redis = "4.6.0"
regex = "2023.8.8"
requests = "2.31.0"
requests = "2.32.0"
restrictedpython = "6.2"
rq = "1.9.0"
rq-scheduler = "0.11.0"
rq = "1.16.1"
rq-scheduler = "0.13.1"
semver = "2.8.1"
sentry-sdk = "1.28.1"
sqlalchemy = "1.3.24"
sqlalchemy-searchable = "1.2.0"
sqlalchemy-utils = "0.34.2"
sqlparse = "0.4.4"
sqlalchemy-utils = "0.38.3"
sqlparse = "0.5.0"
sshtunnel = "0.1.5"
statsd = "3.3.0"
supervisor = "4.1.0"
supervisor-checks = "0.8.1"
ua-parser = "0.18.0"
urllib3 = "1.26.18"
urllib3 = "1.26.19"
user-agents = "2.0"
werkzeug = "2.3.8"
wtforms = "2.2.1"
xlsxwriter = "1.2.2"
tzlocal = "4.3.1"
pyodbc = "5.1.0"

[tool.poetry.group.all_ds]
optional = true
@@ -110,7 +111,7 @@ nzalchemy = "^11.0.2"
nzpy = ">=1.15"
oauth2client = "4.1.3"
openpyxl = "3.0.7"
oracledb = "2.0.1"
oracledb = "2.1.2"
pandas = "1.3.4"
phoenixdb = "0.7"
pinotdb = ">=0.4.5"
@@ -121,12 +122,11 @@ pydruid = "0.5.7"
pyexasol = "0.12.0"
pyhive = "0.6.1"
pyignite = "0.6.1"
pymongo = { version = "4.3.3", extras = ["srv", "tls"] }
pymongo = { version = "4.6.3", extras = ["srv", "tls"] }
pymssql = "2.2.8"
pyodbc = "4.0.28"
pyodbc = "5.1.0"
python-arango = "6.1.0"
python-rapidjson = "1.1.0"
qds-sdk = ">=1.9.6"
requests-aws-sign = "0.1.5"
sasl = ">=0.1.3"
simple-salesforce = "0.74.3"
@@ -152,7 +152,7 @@ optional = true
pytest = "7.4.0"
coverage = "7.2.7"
freezegun = "1.2.1"
jwcrypto = "1.5.1"
jwcrypto = "1.5.6"
mock = "5.0.2"
pre-commit = "3.3.3"
ptpython = "3.0.23"

@@ -14,7 +14,7 @@ from redash.app import create_app  # noqa
from redash.destinations import import_destinations
from redash.query_runner import import_query_runners

__version__ = "24.03.0-dev"
__version__ = "24.07.0-dev"


if os.environ.get("REMOTE_DEBUG"):

@@ -36,14 +36,10 @@ def create_app():
|
||||
from .metrics import request as request_metrics
|
||||
from .models import db, users
|
||||
from .utils import sentry
|
||||
from .version_check import reset_new_version_status
|
||||
|
||||
sentry.init()
|
||||
app = Redash()
|
||||
|
||||
# Check and update the cached version for use by the client
|
||||
reset_new_version_status()
|
||||
|
||||
security.init_app(app)
|
||||
request_metrics.init_app(app)
|
||||
db.init_app(app)
|
||||
|
||||
@@ -8,6 +8,7 @@ from redash import settings

try:
    from ldap3 import Connection, Server
    from ldap3.utils.conv import escape_filter_chars
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit(
@@ -69,6 +70,7 @@ def login(org_slug=None):


def auth_ldap_user(username, password):
    clean_username = escape_filter_chars(username)
    server = Server(settings.LDAP_HOST_URL, use_ssl=settings.LDAP_SSL)
    if settings.LDAP_BIND_DN is not None:
        conn = Connection(
@@ -83,7 +85,7 @@ def auth_ldap_user(username, password):

    conn.search(
        settings.LDAP_SEARCH_DN,
        settings.LDAP_SEARCH_TEMPLATE % {"username": username},
        settings.LDAP_SEARCH_TEMPLATE % {"username": clean_username},
        attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
    )

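The hunk above escapes LDAP filter metacharacters in the supplied username before it is interpolated into the search template, closing an LDAP filter injection vector. A minimal sketch of the effect, using a hypothetical search template (the template string below is an assumption, not the project's actual default):

from ldap3.utils.conv import escape_filter_chars

LDAP_SEARCH_TEMPLATE = "(cn=%(username)s)"  # hypothetical template, for illustration only

username = "admin)(objectClass=*"  # crafted input that would otherwise break out of the filter
clean_username = escape_filter_chars(username)

# Raw interpolation produces a second filter clause; the escaped form replaces the
# metacharacters with \28 / \29 / \2a style sequences so they only match literally.
print(LDAP_SEARCH_TEMPLATE % {"username": username})
print(LDAP_SEARCH_TEMPLATE % {"username": clean_username})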
@@ -26,13 +26,13 @@ class Slack(BaseDestination):
|
||||
fields = [
|
||||
{
|
||||
"title": "Query",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
|
||||
"short": True,
|
||||
},
|
||||
{
|
||||
"title": "Alert",
|
||||
"type": "mrkdwn",
|
||||
"value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
|
||||
"short": True,
|
||||
},
|
||||
]
|
||||
if alert.custom_body:
|
||||
|
||||
@@ -15,7 +15,6 @@ from redash.authentication.account import (
|
||||
)
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.base import json_response, org_scoped_rule
|
||||
from redash.version_check import get_latest_version
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -256,15 +255,11 @@ def number_format_config():
|
||||
|
||||
def client_config():
|
||||
if not current_user.is_api_user() and current_user.is_authenticated:
|
||||
client_config = {
|
||||
"newVersionAvailable": bool(get_latest_version()),
|
||||
client_config_inner = {
|
||||
"version": __version__,
|
||||
}
|
||||
else:
|
||||
client_config = {}
|
||||
|
||||
if current_user.has_permission("admin") and current_org.get_setting("beacon_consent") is None:
|
||||
client_config["showBeaconConsentMessage"] = True
|
||||
client_config_inner = {}
|
||||
|
||||
defaults = {
|
||||
"allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
|
||||
@@ -284,12 +279,12 @@ def client_config():
|
||||
"tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
|
||||
}
|
||||
|
||||
client_config.update(defaults)
|
||||
client_config.update({"basePath": base_href()})
|
||||
client_config.update(date_time_format_config())
|
||||
client_config.update(number_format_config())
|
||||
client_config_inner.update(defaults)
|
||||
client_config_inner.update({"basePath": base_href()})
|
||||
client_config_inner.update(date_time_format_config())
|
||||
client_config_inner.update(number_format_config())
|
||||
|
||||
return client_config
|
||||
return client_config_inner
|
||||
|
||||
|
||||
def messages():
|
||||
|
||||
@@ -7,13 +7,13 @@ from flask_restful import Resource, abort
|
||||
from sqlalchemy import cast
|
||||
from sqlalchemy.dialects.postgresql import ARRAY
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from sqlalchemy_utils.functions import sort_query
|
||||
|
||||
from redash import settings
|
||||
from redash.authentication import current_org
|
||||
from redash.models import db
|
||||
from redash.tasks import record_event as record_event_task
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils.query_order import sort_query
|
||||
|
||||
routes = Blueprint("redash", __name__, template_folder=settings.fix_assets_path("templates"))
|
||||
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
from flask import g, redirect, render_template, request, url_for
|
||||
from flask_login import login_user
|
||||
from wtforms import BooleanField, Form, PasswordField, StringField, validators
|
||||
from wtforms import Form, PasswordField, StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
from redash import settings
|
||||
from redash.authentication.org_resolving import current_org
|
||||
from redash.handlers.base import routes
|
||||
from redash.models import Group, Organization, User, db
|
||||
from redash.tasks.general import subscribe
|
||||
|
||||
|
||||
class SetupForm(Form):
|
||||
@@ -15,8 +14,6 @@ class SetupForm(Form):
|
||||
email = EmailField("Email Address", validators=[validators.Email()])
|
||||
password = PasswordField("Password", validators=[validators.Length(6)])
|
||||
org_name = StringField("Organization Name", validators=[validators.InputRequired()])
|
||||
security_notifications = BooleanField()
|
||||
newsletter = BooleanField()
|
||||
|
||||
|
||||
def create_org(org_name, user_name, email, password):
|
||||
@@ -57,8 +54,6 @@ def setup():
|
||||
return redirect("/")
|
||||
|
||||
form = SetupForm(request.form)
|
||||
form.newsletter.data = True
|
||||
form.security_notifications.data = True
|
||||
|
||||
if request.method == "POST" and form.validate():
|
||||
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
|
||||
@@ -66,10 +61,6 @@ def setup():
|
||||
g.org = default_org
|
||||
login_user(user)
|
||||
|
||||
# signup to newsletter if needed
|
||||
if form.newsletter.data or form.security_notifications:
|
||||
subscribe.delay(form.data)
|
||||
|
||||
return redirect(url_for("redash.index", org_slug=None))
|
||||
|
||||
return render_template("setup.html", form=form)
|
||||
|
||||
@@ -42,7 +42,6 @@ class WidgetListResource(BaseResource):
|
||||
|
||||
widget = models.Widget(**widget_properties)
|
||||
models.db.session.add(widget)
|
||||
models.db.session.commit()
|
||||
|
||||
models.db.session.commit()
|
||||
return serialize_widget(widget)
|
||||
|
||||
@@ -578,7 +578,8 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
        return [
            query
            for query in queries
            if query.schedule["until"] is not None
            if "until" in query.schedule
            and query.schedule["until"] is not None
            and pytz.utc.localize(datetime.datetime.strptime(query.schedule["until"], "%Y-%m-%d")) <= now
        ]

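The extra membership check above keeps the scheduled-query filter from raising a KeyError when a schedule has no "until" key at all. A standalone sketch of the same guard (the schedule dicts and the value of now are made up for illustration):

import datetime

import pytz

now = pytz.utc.localize(datetime.datetime(2024, 7, 1))

schedules = [
    {"interval": 3600},                         # no "until" key: must not raise
    {"interval": 3600, "until": None},          # explicit null: skipped
    {"interval": 3600, "until": "2024-01-31"},  # already past: selected
]

expired = [
    s
    for s in schedules
    if "until" in s
    and s["until"] is not None
    and pytz.utc.localize(datetime.datetime.strptime(s["until"], "%Y-%m-%d")) <= now
]

print(expired)  # only the schedule whose "until" date is on or before now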
@@ -63,5 +63,8 @@ class AmazonElasticsearchService(ElasticSearch2):

        self.auth = AWSV4Sign(cred, region, "es")

    def get_auth(self):
        return self.auth


register(AmazonElasticsearchService)

@@ -100,7 +100,7 @@ class BigQuery(BaseQueryRunner):

    def __init__(self, configuration):
        super().__init__(configuration)
        self.should_annotate_query = configuration["useQueryAnnotation"]
        self.should_annotate_query = configuration.get("useQueryAnnotation", False)

    @classmethod
    def enabled(cls):

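Using .get() with a default above means BigQuery data sources saved before the useQueryAnnotation option existed no longer fail when the runner is constructed. The difference in one line (the configuration dict is hypothetical):

old_configuration = {"projectId": "example-project"}  # hypothetical config saved before the option existed

# old_configuration["useQueryAnnotation"] would raise KeyError;
# .get() falls back to False and query annotation simply stays off.
should_annotate_query = old_configuration.get("useQueryAnnotation", False)
print(should_annotate_query)  # False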
@@ -121,7 +121,7 @@ class ClickHouse(BaseSQLQueryRunner):
                verify=verify,
            )

            if r.status_code != 200:
            if not r.ok:
                raise Exception(r.text)

            # In certain situations the response body can be empty even if the query was successful, for example
@@ -129,7 +129,11 @@ class ClickHouse(BaseSQLQueryRunner):
            if not r.text:
                return {}

            return r.json()
            response = r.json()
            if "exception" in response:
                raise Exception(response["exception"])

            return response
        except requests.RequestException as e:
            if e.response:
                details = "({}, Status Code: {})".format(e.__class__.__name__, e.response.status_code)

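Two behaviours change above: any 2xx status is now accepted via r.ok, and errors that ClickHouse reports inside an HTTP 200 body (an "exception" key in the JSON) are raised instead of being returned as data. A sketch of the same checks as a standalone helper (this helper is not part of the project's API, just an illustration):

import requests


def check_clickhouse_response(r: requests.Response) -> dict:
    if not r.ok:  # any non-2xx status is treated as an error
        raise Exception(r.text)
    if not r.text:  # some successful statements return an empty body
        return {}
    response = r.json()
    if "exception" in response:  # ClickHouse can report errors inside a 200 response
        raise Exception(response["exception"])
    return response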
@@ -129,6 +129,8 @@ class BaseElasticSearch(BaseQueryRunner):
        for index_name in mappings_data:
            index_mappings = mappings_data[index_name]
            for m in index_mappings.get("mappings", {}):
                if not isinstance(index_mappings["mappings"][m], dict):
                    continue
                if "properties" not in index_mappings["mappings"][m]:
                    continue
                for property_name in index_mappings["mappings"][m]["properties"]:

@@ -1,3 +1,4 @@
import json
import logging
from typing import Optional, Tuple

@@ -45,7 +46,7 @@ class ElasticSearch2(BaseHTTPQueryRunner):
        self.syntax = "json"

    def get_response(self, url, auth=None, http_method="get", **kwargs):
        url = "{}{}".format(self.configuration["url"], url)
        url = "{}{}".format(self.configuration["server"], url)
        headers = kwargs.pop("headers", {})
        headers["Accept"] = "application/json"
        return super().get_response(url, auth, http_method, headers=headers, **kwargs)
@@ -64,6 +65,7 @@ class ElasticSearch2(BaseHTTPQueryRunner):
        return data, error

    def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
        query = json.loads(query)
        index_name = query.pop("index", "")
        result_fields = query.pop("result_fields", None)
        url = "/{}/_search".format(index_name)

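_build_query above treats the query text as a JSON document, pops the routing keys out of it, and keeps the rest as the search body. A sketch of the shape it expects (the index and field names are invented):

import json

query_text = """
{
    "index": "logs-2024",
    "result_fields": ["timestamp", "message"],
    "query": {"match": {"level": "error"}},
    "size": 100
}
"""

query = json.loads(query_text)
index_name = query.pop("index", "")
result_fields = query.pop("result_fields", None)
url = "/{}/_search".format(index_name)

print(url)            # /logs-2024/_search
print(result_fields)  # ['timestamp', 'message']
print(query)          # the remaining keys form the Elasticsearch request body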
@@ -69,7 +69,7 @@ def datetime_parser(dct):
    return bson_object_hook(dct, json_options=opts)


def parse_query_json(query):
def parse_query_json(query: str):
    query_data = json_loads(query, object_hook=datetime_parser)
    return query_data

@@ -82,26 +82,40 @@ def _get_column_by_name(columns, column_name):
    return None


def _parse_dict(dic):
def _parse_dict(dic: dict, flatten: bool = False) -> dict:
    res = {}
    for key, value in dic.items():
        if isinstance(value, dict):
            for tmp_key, tmp_value in _parse_dict(value).items():
                new_key = "{}.{}".format(key, tmp_key)
                res[new_key] = tmp_value

    def _flatten(x, name=""):
        if isinstance(x, dict):
            for k, v in x.items():
                _flatten(v, "{}.{}".format(name, k))
        elif isinstance(x, list):
            for idx, item in enumerate(x):
                _flatten(item, "{}.{}".format(name, idx))
        else:
            res[key] = value
            res[name[1:]] = x

    if flatten:
        _flatten(dic)
    else:
        for key, value in dic.items():
            if isinstance(value, dict):
                for tmp_key, tmp_value in _parse_dict(value).items():
                    new_key = "{}.{}".format(key, tmp_key)
                    res[new_key] = tmp_value
            else:
                res[key] = value
    return res


def parse_results(results):
def parse_results(results: list, flatten: bool = False) -> list:
    rows = []
    columns = []

    for row in results:
        parsed_row = {}

        parsed_row = _parse_dict(row)
        parsed_row = _parse_dict(row, flatten)
        for column_name, value in parsed_row.items():
            columns.append(
                {
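With the new flatten flag, nested dictionaries and lists are both collapsed into dotted column names, whereas the previous behaviour only expanded nested dictionaries and left lists as single values. A standalone re-implementation of the _flatten helper above, run on a made-up document (flatten_doc is a local name for illustration, not part of the codebase):

def flatten_doc(x, name="", out=None):
    # Recursively walk dicts and lists, building dotted key paths.
    if out is None:
        out = {}
    if isinstance(x, dict):
        for k, v in x.items():
            flatten_doc(v, "{}.{}".format(name, k), out)
    elif isinstance(x, list):
        for idx, item in enumerate(x):
            flatten_doc(item, "{}.{}".format(name, idx), out)
    else:
        out[name[1:]] = x  # strip the leading "." added by the first recursion step
    return out


doc = {"user": {"name": "alice", "tags": ["admin", "ops"]}, "count": 3}
print(flatten_doc(doc))
# {'user.name': 'alice', 'user.tags.0': 'admin', 'user.tags.1': 'ops', 'count': 3}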
@@ -140,6 +154,14 @@ class MongoDB(BaseQueryRunner):
                    ],
                    "title": "Replica Set Read Preference",
                },
                "flatten": {
                    "type": "string",
                    "extendedEnum": [
                        {"value": "False", "name": "False"},
                        {"value": "True", "name": "True"},
                    ],
                    "title": "Flatten Results",
                },
            },
            "secret": ["password"],
            "required": ["connectionString", "dbName"],
@@ -160,6 +182,9 @@ class MongoDB(BaseQueryRunner):
            True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
        )

        self.flatten = self.configuration.get("flatten", "False").upper() in ["TRUE", "YES", "ON", "1", "Y", "T"]
        logger.debug("flatten: {}".format(self.flatten))

    @classmethod
    def custom_json_encoder(cls, dec, o):
        if isinstance(o, ObjectId):
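The data-source option is stored as a string, so the constructor above normalises a handful of truthy spellings; anything else leaves flattening off. For example (the configuration dict is hypothetical):

configuration = {"flatten": "True"}  # as saved from the data-source form

flatten = configuration.get("flatten", "False").upper() in ["TRUE", "YES", "ON", "1", "Y", "T"]
print(flatten)  # True; "false", "no", "0" or a missing key all evaluate to False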
@@ -278,8 +303,10 @@ class MongoDB(BaseQueryRunner):
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                        if isinstance(sort_item, dict):
                            sort_list.append((sort_item["name"], sort_item.get("direction", 1)))
                        elif isinstance(sort_item, list):
                            sort_list.append(tuple(sort_item))
                    step["$sort"] = SON(sort_list)

        if "fields" in query_data:
@@ -289,7 +316,10 @@ class MongoDB(BaseQueryRunner):
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_data in query_data["sort"]:
                s.append((field_data["name"], field_data["direction"]))
                if isinstance(field_data, dict):
                    s.append((field_data["name"], field_data.get("direction", 1)))
                elif isinstance(field_data, list):
                    s.append(tuple(field_data))

        columns = []
        rows = []
@@ -330,7 +360,7 @@ class MongoDB(BaseQueryRunner):

                rows.append({"count": cursor})
            else:
                rows, columns = parse_results(cursor)
                rows, columns = parse_results(cursor, flatten=self.flatten)

            if f:
                ordered_columns = []
@@ -340,6 +370,7 @@ class MongoDB(BaseQueryRunner):
                        ordered_columns.append(column)

                columns = ordered_columns
                logger.debug("columns: {}".format(columns))

            if query_data.get("sortColumns"):
                reverse = query_data["sortColumns"] == "desc"

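Both the aggregation $sort stage and the top-level sort key now accept either the original dict form or a plain [field, direction] pair, and the direction defaults to ascending when omitted. A sketch of the normalisation with pymongo's ordered SON type (the field names are invented):

from bson.son import SON  # ships with pymongo; keeps key order for $sort stages

sort_items = [
    {"name": "created_at", "direction": -1},  # dict form, explicit direction
    {"name": "status"},                       # dict form, direction defaults to 1
    ["name", 1],                              # [field, direction] pair form
]

sort_list = []
for sort_item in sort_items:
    if isinstance(sort_item, dict):
        sort_list.append((sort_item["name"], sort_item.get("direction", 1)))
    elif isinstance(sort_item, list):
        sort_list.append(tuple(sort_item))

print(SON(sort_list))
# SON([('created_at', -1), ('status', 1), ('name', 1)])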
@@ -48,7 +48,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
                "verify_ssl": {
                    "type": "boolean",
                    "title": "Verify SSL certificate",
                    "default": True,
                    "default": False,
                },
            },
            "order": [
@@ -120,14 +120,29 @@ class SQLServerODBC(BaseSQLQueryRunner):
        db = self.configuration["db"]
        port = self.configuration.get("port", 1433)

        connection_string_fmt = "DRIVER={{ODBC Driver 17 for SQL Server}};SERVER={},{};DATABASE={};UID={};PWD={}"
        connection_string = connection_string_fmt.format(server, port, db, user, password)
        connection_params = {
            "Driver": "{ODBC Driver 18 for SQL Server}",
            "Server": server,
            "Port": port,
            "Database": db,
            "Uid": user,
            "Pwd": password,
        }

        if self.configuration.get("use_ssl", False):
            connection_string += ";Encrypt=YES"
            connection_params["Encrypt"] = "YES"

            if not self.configuration.get("verify_ssl"):
                connection_string += ";TrustServerCertificate=YES"
                connection_params["TrustServerCertificate"] = "YES"
            else:
                connection_params["TrustServerCertificate"] = "NO"
        else:
            connection_params["Encrypt"] = "NO"

        def fn(k):
            return "{}={}".format(k, connection_params[k])

        connection_string = ";".join(list(map(fn, connection_params)))

        connection = pyodbc.connect(connection_string)
        cursor = connection.cursor()

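The connection string is now assembled from a dict of parameters, which makes the Encrypt / TrustServerCertificate combinations easier to follow, and the driver reference moves to ODBC Driver 18. A sketch of the string the joiner produces (all values below are made up):

connection_params = {
    "Driver": "{ODBC Driver 18 for SQL Server}",
    "Server": "sql.example.internal",
    "Port": 1433,
    "Database": "analytics",
    "Uid": "redash",
    "Pwd": "secret",
    "Encrypt": "YES",
    "TrustServerCertificate": "NO",
}

connection_string = ";".join("{}={}".format(k, v) for k, v in connection_params.items())
print(connection_string)
# Driver={ODBC Driver 18 for SQL Server};Server=sql.example.internal;Port=1433;Database=analytics;Uid=redash;Pwd=secret;Encrypt=YES;TrustServerCertificate=NO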
@@ -231,7 +231,9 @@ class PostgreSQL(BaseSQLQueryRunner):
              ON a.attrelid = c.oid
             AND a.attnum > 0
             AND NOT a.attisdropped
            WHERE c.relkind IN ('m', 'f', 'p') AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
            WHERE c.relkind IN ('m', 'f', 'p')
              AND has_table_privilege(s.nspname || '.' || c.relname, 'select')
              AND has_schema_privilege(s.nspname, 'usage')

            UNION

@@ -1,169 +0,0 @@
|
||||
import logging
|
||||
import time
|
||||
from io import StringIO
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import (
|
||||
TYPE_STRING,
|
||||
BaseQueryRunner,
|
||||
JobTimeoutException,
|
||||
register,
|
||||
)
|
||||
|
||||
try:
|
||||
import qds_sdk # noqa: F401
|
||||
from qds_sdk.commands import (
|
||||
Command,
|
||||
HiveCommand,
|
||||
PrestoCommand,
|
||||
SqlCommand,
|
||||
)
|
||||
from qds_sdk.qubole import Qubole as qbol
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
|
||||
class Qubole(BaseQueryRunner):
|
||||
should_annotate_query = False
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query_type": {
|
||||
"type": "string",
|
||||
"title": "Query Type (quantum / presto / hive)",
|
||||
"default": "hive",
|
||||
},
|
||||
"endpoint": {
|
||||
"type": "string",
|
||||
"title": "API Endpoint",
|
||||
"default": "https://api.qubole.com",
|
||||
},
|
||||
"token": {"type": "string", "title": "Auth Token"},
|
||||
"cluster": {
|
||||
"type": "string",
|
||||
"title": "Cluster Label",
|
||||
"default": "default",
|
||||
},
|
||||
},
|
||||
"order": ["query_type", "endpoint", "token", "cluster"],
|
||||
"required": ["endpoint", "token"],
|
||||
"secret": ["token"],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "qubole"
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return "Qubole"
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
def test_connection(self):
|
||||
headers = self._get_header()
|
||||
r = requests.head("%s/api/latest/users" % self.configuration.get("endpoint"), headers=headers)
|
||||
r.status_code == 200
|
||||
|
||||
def run_query(self, query, user):
|
||||
qbol.configure(
|
||||
api_token=self.configuration.get("token"),
|
||||
api_url="%s/api" % self.configuration.get("endpoint"),
|
||||
)
|
||||
|
||||
try:
|
||||
query_type = self.configuration.get("query_type", "hive")
|
||||
|
||||
if query_type == "quantum":
|
||||
cmd = SqlCommand.create(query=query)
|
||||
elif query_type == "hive":
|
||||
cmd = HiveCommand.create(query=query, label=self.configuration.get("cluster"))
|
||||
elif query_type == "presto":
|
||||
cmd = PrestoCommand.create(query=query, label=self.configuration.get("cluster"))
|
||||
else:
|
||||
raise Exception(
|
||||
"Invalid Query Type:%s.\
|
||||
It must be : hive / presto / quantum."
|
||||
% self.configuration.get("query_type")
|
||||
)
|
||||
|
||||
logging.info("Qubole command created with Id: %s and Status: %s", cmd.id, cmd.status)
|
||||
|
||||
while not Command.is_done(cmd.status):
|
||||
time.sleep(qbol.poll_interval)
|
||||
cmd = Command.find(cmd.id)
|
||||
logging.info("Qubole command Id: %s and Status: %s", cmd.id, cmd.status)
|
||||
|
||||
rows = []
|
||||
columns = []
|
||||
error = None
|
||||
|
||||
if cmd.status == "done":
|
||||
fp = StringIO()
|
||||
cmd.get_results(
|
||||
fp=fp,
|
||||
inline=True,
|
||||
delim="\t",
|
||||
fetch=False,
|
||||
qlog=None,
|
||||
arguments=["true"],
|
||||
)
|
||||
|
||||
results = fp.getvalue()
|
||||
fp.close()
|
||||
|
||||
data = results.split("\r\n")
|
||||
columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")])
|
||||
rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data]
|
||||
|
||||
data = {"columns": columns, "rows": rows}
|
||||
except (KeyboardInterrupt, JobTimeoutException):
|
||||
logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
|
||||
cmd.cancel()
|
||||
raise
|
||||
|
||||
return data, error
|
||||
|
||||
def get_schema(self, get_stats=False):
|
||||
schemas = {}
|
||||
try:
|
||||
headers = self._get_header()
|
||||
content = requests.get(
|
||||
"%s/api/latest/hive?describe=true&per_page=10000" % self.configuration.get("endpoint"),
|
||||
headers=headers,
|
||||
)
|
||||
data = content.json()
|
||||
|
||||
for schema in data["schemas"]:
|
||||
tables = data["schemas"][schema]
|
||||
for table in tables:
|
||||
table_name = list(table.keys())[0]
|
||||
columns = [f["name"] for f in table[table_name]["columns"]]
|
||||
|
||||
if schema != "default":
|
||||
table_name = "{}.{}".format(schema, table_name)
|
||||
|
||||
schemas[table_name] = {"name": table_name, "columns": columns}
|
||||
|
||||
except Exception as e:
|
||||
logging.error("Failed to get schema information from Qubole. Error {}".format(str(e)))
|
||||
|
||||
return list(schemas.values())
|
||||
|
||||
def _get_header(self):
|
||||
return {
|
||||
"Content-type": "application/json",
|
||||
"Accept": "application/json",
|
||||
"X-AUTH-TOKEN": self.configuration.get("token"),
|
||||
}
|
||||
|
||||
|
||||
register(Qubole)
|
||||
@@ -1,3 +1,5 @@
|
||||
import datetime
|
||||
import decimal
|
||||
import hashlib
|
||||
import logging
|
||||
import re
|
||||
@@ -105,6 +107,10 @@ def fix_column_name(name):
|
||||
def flatten(value):
|
||||
if isinstance(value, (list, dict)):
|
||||
return json_dumps(value)
|
||||
elif isinstance(value, decimal.Decimal):
|
||||
return float(value)
|
||||
elif isinstance(value, datetime.timedelta):
|
||||
return str(value)
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
redash/query_runner/risingwave.py (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
from redash.query_runner import register
|
||||
from redash.query_runner.pg import PostgreSQL
|
||||
|
||||
|
||||
class RisingWave(PostgreSQL):
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "risingwave"
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return "RisingWave"
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query = """
|
||||
SELECT s.nspname as table_schema,
|
||||
c.relname as table_name,
|
||||
a.attname as column_name,
|
||||
null as data_type
|
||||
FROM pg_class c
|
||||
JOIN pg_namespace s
|
||||
ON c.relnamespace = s.oid
|
||||
AND s.nspname NOT IN ('pg_catalog', 'information_schema', 'rw_catalog')
|
||||
JOIN pg_attribute a
|
||||
ON a.attrelid = c.oid
|
||||
AND a.attnum > 0
|
||||
AND NOT a.attisdropped
|
||||
WHERE c.relkind IN ('m', 'f', 'p')
|
||||
|
||||
UNION
|
||||
|
||||
SELECT table_schema,
|
||||
table_name,
|
||||
column_name,
|
||||
data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema', 'rw_catalog');
|
||||
"""
|
||||
|
||||
self._get_definitions(schema, query)
|
||||
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
register(RisingWave)
|
||||
@@ -1,6 +1,6 @@
|
||||
import functools
|
||||
|
||||
from flask import request, session
|
||||
from flask import session
|
||||
from flask_login import current_user
|
||||
from flask_talisman import talisman
|
||||
from flask_wtf.csrf import CSRFProtect, generate_csrf
|
||||
@@ -35,17 +35,6 @@ def init_app(app):
|
||||
|
||||
@app.before_request
|
||||
def check_csrf():
|
||||
# BEGIN workaround until https://github.com/lepture/flask-wtf/pull/419 is merged
|
||||
if request.blueprint in csrf._exempt_blueprints:
|
||||
return
|
||||
|
||||
view = app.view_functions.get(request.endpoint)
|
||||
dest = f"{view.__module__}.{view.__name__}"
|
||||
|
||||
if dest in csrf._exempt_views:
|
||||
return
|
||||
# END workaround
|
||||
|
||||
if not current_user.is_authenticated or "user_id" in session:
|
||||
csrf.protect()
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ This will eventually replace all the `to_dict` methods of the different model
|
||||
classes we have. This will ensure cleaner code and better
|
||||
separation of concerns.
|
||||
"""
|
||||
|
||||
from flask_login import current_user
|
||||
from funcy import project
|
||||
from rq.job import JobStatus
|
||||
@@ -276,6 +277,9 @@ def serialize_job(job):
|
||||
JobStatus.STARTED: 2,
|
||||
JobStatus.FINISHED: 3,
|
||||
JobStatus.FAILED: 4,
|
||||
JobStatus.CANCELED: 5,
|
||||
JobStatus.DEFERRED: 6,
|
||||
JobStatus.SCHEDULED: 7,
|
||||
}
|
||||
|
||||
job_status = job.get_status()
|
||||
|
||||
@@ -312,7 +312,6 @@ default_query_runners = [
|
||||
"redash.query_runner.salesforce",
|
||||
"redash.query_runner.query_results",
|
||||
"redash.query_runner.prometheus",
|
||||
"redash.query_runner.qubole",
|
||||
"redash.query_runner.db2",
|
||||
"redash.query_runner.druid",
|
||||
"redash.query_runner.kylin",
|
||||
@@ -339,6 +338,7 @@ default_query_runners = [
|
||||
"redash.query_runner.ignite",
|
||||
"redash.query_runner.oracle",
|
||||
"redash.query_runner.e6data",
|
||||
"redash.query_runner.risingwave",
|
||||
]
|
||||
|
||||
enabled_query_runners = array_from_string(
|
||||
@@ -412,7 +412,6 @@ PAGE_SIZE_OPTIONS = list(
|
||||
TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get("REDASH_TABLE_CELL_MAX_JSON_SIZE", 50000))
|
||||
|
||||
# Features:
|
||||
VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
|
||||
FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
|
||||
FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
|
||||
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(
|
||||
|
||||
@@ -45,7 +45,6 @@ HIDE_PLOTLY_MODE_BAR = parse_boolean(os.environ.get("HIDE_PLOTLY_MODE_BAR", "fal
|
||||
DISABLE_PUBLIC_URLS = parse_boolean(os.environ.get("REDASH_DISABLE_PUBLIC_URLS", "false"))
|
||||
|
||||
settings = {
|
||||
"beacon_consent": None,
|
||||
"auth_password_login_enabled": PASSWORD_LOGIN_ENABLED,
|
||||
"auth_saml_enabled": SAML_LOGIN_ENABLED,
|
||||
"auth_saml_type": SAML_LOGIN_TYPE,
|
||||
|
||||
@@ -7,7 +7,6 @@ from redash.tasks.general import (
|
||||
record_event,
|
||||
send_mail,
|
||||
sync_user_details,
|
||||
version_check,
|
||||
)
|
||||
from redash.tasks.queries import (
|
||||
cleanup_query_results,
|
||||
|
||||
@@ -5,7 +5,6 @@ from redash import mail, models, settings
|
||||
from redash.models import users
|
||||
from redash.query_runner import NotSupported
|
||||
from redash.tasks.worker import Queue
|
||||
from redash.version_check import run_version_check
|
||||
from redash.worker import get_job_logger, job
|
||||
|
||||
logger = get_job_logger(__name__)
|
||||
@@ -30,27 +29,6 @@ def record_event(raw_event):
|
||||
logger.exception("Failed posting to %s", hook)
|
||||
|
||||
|
||||
def version_check():
|
||||
run_version_check()
|
||||
|
||||
|
||||
@job("default")
|
||||
def subscribe(form):
|
||||
logger.info(
|
||||
"Subscribing to: [security notifications=%s], [newsletter=%s]",
|
||||
form["security_notifications"],
|
||||
form["newsletter"],
|
||||
)
|
||||
data = {
|
||||
"admin_name": form["name"],
|
||||
"admin_email": form["email"],
|
||||
"org_name": form["org_name"],
|
||||
"security_notifications": form["security_notifications"],
|
||||
"newsletter": form["newsletter"],
|
||||
}
|
||||
requests.post("https://beacon.redash.io/subscribe", json=data)
|
||||
|
||||
|
||||
@job("emails")
|
||||
def send_mail(to, subject, html, text):
|
||||
try:
|
||||
|
||||
@@ -55,7 +55,7 @@ def enqueue_query(query, data_source, user_id, is_api_key=False, scheduled_query
|
||||
if job_complete:
|
||||
message = "job found is complete (%s)" % status
|
||||
elif job_cancelled:
|
||||
message = "job found has ben cancelled"
|
||||
message = "job found has been cancelled"
|
||||
except NoSuchJobError:
|
||||
message = "job found has expired"
|
||||
job_exists = False
|
||||
|
||||
@@ -8,7 +8,7 @@ from rq_scheduler import Scheduler
|
||||
|
||||
from redash import rq_redis_connection, settings
|
||||
from redash.tasks.failure_report import send_aggregated_errors
|
||||
from redash.tasks.general import sync_user_details, version_check
|
||||
from redash.tasks.general import sync_user_details
|
||||
from redash.tasks.queries import (
|
||||
cleanup_query_results,
|
||||
empty_schedules,
|
||||
@@ -79,9 +79,6 @@ def periodic_job_definitions():
|
||||
},
|
||||
]
|
||||
|
||||
if settings.VERSION_CHECK:
|
||||
jobs.append({"func": version_check, "interval": timedelta(days=1)})
|
||||
|
||||
if settings.QUERY_RESULTS_CLEANUP_ENABLED:
|
||||
jobs.append({"func": cleanup_query_results, "interval": timedelta(minutes=5)})
|
||||
|
||||
|
||||
@@ -42,20 +42,6 @@
    {{ render_field(form.email) }}
    {{ render_field(form.password) }}

    <div class="checkbox">
        <label>
            {{ form.security_notifications() }}
            Subscribe to Security Notifications
        </label>
    </div>

    <div class="checkbox">
        <label>
            {{ form.newsletter() }}
            Subscribe to newsletter (version updates, no more than once a month)
        </label>
    </div>

    <h4 class="m-t-25">General</h4>

    {{ render_field(form.org_name, help_block="Used in email notifications and the UI.") }}

redash/utils/query_order.py (new file, 310 lines)
@@ -0,0 +1,310 @@
|
||||
# Copyright (c) 2012, Konsta Vesterinen
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice, this
|
||||
# list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# * The names of the contributors may not be used to endorse or promote products
|
||||
# derived from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
|
||||
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
||||
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
from inspect import isclass
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.orm import mapperlib
|
||||
from sqlalchemy.orm.properties import ColumnProperty
|
||||
from sqlalchemy.orm.query import _ColumnEntity
|
||||
from sqlalchemy.orm.util import AliasedInsp
|
||||
from sqlalchemy.sql.expression import asc, desc
|
||||
|
||||
|
||||
def get_query_descriptor(query, entity, attr):
|
||||
if attr in query_labels(query):
|
||||
return attr
|
||||
else:
|
||||
entity = get_query_entity_by_alias(query, entity)
|
||||
if entity:
|
||||
descriptor = get_descriptor(entity, attr)
|
||||
if hasattr(descriptor, "property") and isinstance(descriptor.property, sa.orm.RelationshipProperty):
|
||||
return
|
||||
return descriptor
|
||||
|
||||
|
||||
def query_labels(query):
|
||||
"""
|
||||
Return all labels for given SQLAlchemy query object.
|
||||
Example::
|
||||
query = session.query(
|
||||
Category,
|
||||
db.func.count(Article.id).label('articles')
|
||||
)
|
||||
query_labels(query) # ['articles']
|
||||
:param query: SQLAlchemy Query object
|
||||
"""
|
||||
return [
|
||||
entity._label_name for entity in query._entities if isinstance(entity, _ColumnEntity) and entity._label_name
|
||||
]
|
||||
|
||||
|
||||
def get_query_entity_by_alias(query, alias):
|
||||
entities = get_query_entities(query)
|
||||
if not alias:
|
||||
return entities[0]
|
||||
for entity in entities:
|
||||
if isinstance(entity, sa.orm.util.AliasedClass):
|
||||
name = sa.inspect(entity).name
|
||||
else:
|
||||
name = get_mapper(entity).tables[0].name
|
||||
if name == alias:
|
||||
return entity
|
||||
|
||||
|
||||
def get_query_entities(query):
|
||||
"""
|
||||
Return a list of all entities present in given SQLAlchemy query object.
|
||||
Examples::
|
||||
from sqlalchemy_utils import get_query_entities
|
||||
query = session.query(Category)
|
||||
get_query_entities(query) # [<Category>]
|
||||
query = session.query(Category.id)
|
||||
get_query_entities(query) # [<Category>]
|
||||
This function also supports queries with joins.
|
||||
::
|
||||
query = session.query(Category).join(Article)
|
||||
get_query_entities(query) # [<Category>, <Article>]
|
||||
.. versionchanged: 0.26.7
|
||||
This function now returns a list instead of generator
|
||||
:param query: SQLAlchemy Query object
|
||||
"""
|
||||
exprs = [
|
||||
d["expr"] if is_labeled_query(d["expr"]) or isinstance(d["expr"], sa.Column) else d["entity"]
|
||||
for d in query.column_descriptions
|
||||
]
|
||||
return [get_query_entity(expr) for expr in exprs] + [get_query_entity(entity) for entity in query._join_entities]
|
||||
|
||||
|
||||
def is_labeled_query(expr):
|
||||
return isinstance(expr, sa.sql.elements.Label) and isinstance(
|
||||
list(expr.base_columns)[0], (sa.sql.selectable.Select, sa.sql.selectable.ScalarSelect)
|
||||
)
|
||||
|
||||
|
||||
def get_query_entity(expr):
|
||||
if isinstance(expr, sa.orm.attributes.InstrumentedAttribute):
|
||||
return expr.parent.class_
|
||||
elif isinstance(expr, sa.Column):
|
||||
return expr.table
|
||||
elif isinstance(expr, AliasedInsp):
|
||||
return expr.entity
|
||||
return expr
|
||||
|
||||
|
||||
def get_mapper(mixed):
|
||||
"""
|
||||
Return related SQLAlchemy Mapper for given SQLAlchemy object.
|
||||
:param mixed: SQLAlchemy Table / Alias / Mapper / declarative model object
|
||||
::
|
||||
from sqlalchemy_utils import get_mapper
|
||||
get_mapper(User)
|
||||
get_mapper(User())
|
||||
get_mapper(User.__table__)
|
||||
get_mapper(User.__mapper__)
|
||||
get_mapper(sa.orm.aliased(User))
|
||||
get_mapper(sa.orm.aliased(User.__table__))
|
||||
Raises:
|
||||
ValueError: if multiple mappers were found for given argument
|
||||
.. versionadded: 0.26.1
|
||||
"""
|
||||
if isinstance(mixed, sa.orm.query._MapperEntity):
|
||||
mixed = mixed.expr
|
||||
elif isinstance(mixed, sa.Column):
|
||||
mixed = mixed.table
|
||||
elif isinstance(mixed, sa.orm.query._ColumnEntity):
|
||||
mixed = mixed.expr
|
||||
if isinstance(mixed, sa.orm.Mapper):
|
||||
return mixed
|
||||
if isinstance(mixed, sa.orm.util.AliasedClass):
|
||||
return sa.inspect(mixed).mapper
|
||||
if isinstance(mixed, sa.sql.selectable.Alias):
|
||||
mixed = mixed.element
|
||||
if isinstance(mixed, AliasedInsp):
|
||||
return mixed.mapper
|
||||
if isinstance(mixed, sa.orm.attributes.InstrumentedAttribute):
|
||||
mixed = mixed.class_
|
||||
if isinstance(mixed, sa.Table):
|
||||
mappers = [mapper for mapper in mapperlib._mapper_registry if mixed in mapper.tables]
|
||||
if len(mappers) > 1:
|
||||
raise ValueError("Multiple mappers found for table '%s'." % mixed.name)
|
||||
elif not mappers:
|
||||
raise ValueError("Could not get mapper for table '%s'." % mixed.name)
|
||||
else:
|
||||
return mappers[0]
|
||||
if not isclass(mixed):
|
||||
mixed = type(mixed)
|
||||
return sa.inspect(mixed)
|
||||
|
||||
|
||||
def get_polymorphic_mappers(mixed):
|
||||
if isinstance(mixed, AliasedInsp):
|
||||
return mixed.with_polymorphic_mappers
|
||||
else:
|
||||
return mixed.polymorphic_map.values()
|
||||
|
||||
|
||||
def get_descriptor(entity, attr):
|
||||
mapper = sa.inspect(entity)
|
||||
for key, descriptor in get_all_descriptors(mapper).items():
|
||||
if attr == key:
|
||||
prop = descriptor.property if hasattr(descriptor, "property") else None
|
||||
if isinstance(prop, ColumnProperty):
|
||||
if isinstance(entity, sa.orm.util.AliasedClass):
|
||||
for c in mapper.selectable.c:
|
||||
if c.key == attr:
|
||||
return c
|
||||
else:
|
||||
# If the property belongs to a class that uses
|
||||
# polymorphic inheritance we have to take into account
|
||||
# situations where the attribute exists in child class
|
||||
# but not in parent class.
|
||||
return getattr(prop.parent.class_, attr)
|
||||
else:
|
||||
# Handle synonyms, relationship properties and hybrid
|
||||
# properties
|
||||
if isinstance(entity, sa.orm.util.AliasedClass):
|
||||
return getattr(entity, attr)
|
||||
try:
|
||||
return getattr(mapper.class_, attr)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
def get_all_descriptors(expr):
|
||||
if isinstance(expr, sa.sql.selectable.Selectable):
|
||||
return expr.c
|
||||
insp = sa.inspect(expr)
|
||||
try:
|
||||
polymorphic_mappers = get_polymorphic_mappers(insp)
|
||||
except sa.exc.NoInspectionAvailable:
|
||||
return get_mapper(expr).all_orm_descriptors
|
||||
else:
|
||||
attrs = dict(get_mapper(expr).all_orm_descriptors)
|
||||
for submapper in polymorphic_mappers:
|
||||
for key, descriptor in submapper.all_orm_descriptors.items():
|
||||
if key not in attrs:
|
||||
attrs[key] = descriptor
|
||||
return attrs
|
||||
|
||||
|
||||
class QuerySorterException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class QuerySorter:
|
||||
def __init__(self, silent=True, separator="-"):
|
||||
self.separator = separator
|
||||
self.silent = silent
|
||||
|
||||
def assign_order_by(self, entity, attr, func):
|
||||
expr = get_query_descriptor(self.query, entity, attr)
|
||||
if expr is not None:
|
||||
return self.query.order_by(func(expr))
|
||||
if not self.silent:
|
||||
raise QuerySorterException("Could not sort query with expression '%s'" % attr)
|
||||
return self.query
|
||||
|
||||
def parse_sort_arg(self, arg):
|
||||
if arg[0] == self.separator:
|
||||
func = desc
|
||||
arg = arg[1:]
|
||||
else:
|
||||
func = asc
|
||||
parts = arg.split(self.separator)
|
||||
return {
|
||||
"entity": parts[0] if len(parts) > 1 else None,
|
||||
"attr": parts[1] if len(parts) > 1 else arg,
|
||||
"func": func,
|
||||
}
|
||||
|
||||
def __call__(self, query, *args):
|
||||
self.query = query
|
||||
for sort in args:
|
||||
if not sort:
|
||||
continue
|
||||
self.query = self.assign_order_by(**self.parse_sort_arg(sort))
|
||||
return self.query
|
||||
|
||||
|
||||
def sort_query(query, *args, **kwargs):
|
||||
"""
|
||||
Applies an sql ORDER BY for given query. This function can be easily used
|
||||
with user-defined sorting.
|
||||
The examples use the following model definition:
|
||||
::
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy_utils import sort_query
|
||||
engine = create_engine(
|
||||
'sqlite:///'
|
||||
)
|
||||
Base = declarative_base()
|
||||
Session = sessionmaker(bind=engine)
|
||||
session = Session()
|
||||
class Category(Base):
|
||||
__tablename__ = 'category'
|
||||
id = sa.Column(sa.Integer, primary_key=True)
|
||||
name = sa.Column(sa.Unicode(255))
|
||||
class Article(Base):
|
||||
__tablename__ = 'article'
|
||||
id = sa.Column(sa.Integer, primary_key=True)
|
||||
name = sa.Column(sa.Unicode(255))
|
||||
category_id = sa.Column(sa.Integer, sa.ForeignKey(Category.id))
|
||||
category = sa.orm.relationship(
|
||||
Category, primaryjoin=category_id == Category.id
|
||||
)
|
||||
1. Applying simple ascending sort
|
||||
::
|
||||
query = session.query(Article)
|
||||
query = sort_query(query, 'name')
|
||||
2. Applying descending sort
|
||||
::
|
||||
query = sort_query(query, '-name')
|
||||
3. Applying sort to custom calculated label
|
||||
::
|
||||
query = session.query(
|
||||
Category, sa.func.count(Article.id).label('articles')
|
||||
)
|
||||
query = sort_query(query, 'articles')
|
||||
4. Applying sort to joined table column
|
||||
::
|
||||
query = session.query(Article).join(Article.category)
|
||||
query = sort_query(query, 'category-name')
|
||||
:param query:
|
||||
query to be modified
|
||||
:param sort:
|
||||
string that defines the label or column to sort the query by
|
||||
:param silent:
|
||||
Whether or not to raise exceptions if unknown sort column
|
||||
is passed. By default this is `True` indicating that no errors should
|
||||
be raised for unknown columns.
|
||||
"""
|
||||
return QuerySorter(**kwargs)(query, *args)
|
||||
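The new redash/utils/query_order.py vendors Konsta Vesterinen's sort_query helper. A hedged usage sketch based on the docstring's own examples follows; it assumes a SQLAlchemy version compatible with the vendored internals (the helper relies on 1.3-era attributes such as query._entities), and the Article model is illustrative only:

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from redash.utils.query_order import sort_query

Base = declarative_base()


class Article(Base):
    __tablename__ = "article"
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Unicode(255))


engine = sa.create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

asc_query = sort_query(session.query(Article), "name")    # ORDER BY article.name
desc_query = sort_query(session.query(Article), "-name")  # leading "-" means descending
print(asc_query)
print(desc_query)
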
@@ -1,103 +0,0 @@
|
||||
import logging
|
||||
|
||||
import requests
|
||||
import semver
|
||||
|
||||
from redash import __version__ as current_version
|
||||
from redash import redis_connection
|
||||
from redash.models import Organization, db
|
||||
|
||||
REDIS_KEY = "new_version_available"
|
||||
|
||||
|
||||
def usage_data():
|
||||
counts_query = """
|
||||
SELECT 'users_count' as name, count(0) as value
|
||||
FROM users
|
||||
WHERE disabled_at is null
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT 'queries_count' as name, count(0) as value
|
||||
FROM queries
|
||||
WHERE is_archived is false
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT 'alerts_count' as name, count(0) as value
|
||||
FROM alerts
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT 'dashboards_count' as name, count(0) as value
|
||||
FROM dashboards
|
||||
WHERE is_archived is false
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT 'widgets_count' as name, count(0) as value
|
||||
FROM widgets
|
||||
WHERE visualization_id is not null
|
||||
|
||||
UNION ALL
|
||||
|
||||
SELECT 'textbox_count' as name, count(0) as value
|
||||
FROM widgets
|
||||
WHERE visualization_id is null
|
||||
"""
|
||||
|
||||
data_sources_query = "SELECT type, count(0) FROM data_sources GROUP by 1"
|
||||
visualizations_query = "SELECT type, count(0) FROM visualizations GROUP by 1"
|
||||
destinations_query = "SELECT type, count(0) FROM notification_destinations GROUP by 1"
|
||||
|
||||
data = {name: value for (name, value) in db.session.execute(counts_query)}
|
||||
data["data_sources"] = {name: value for (name, value) in db.session.execute(data_sources_query)}
|
||||
data["visualization_types"] = {name: value for (name, value) in db.session.execute(visualizations_query)}
|
||||
data["destination_types"] = {name: value for (name, value) in db.session.execute(destinations_query)}
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def run_version_check():
|
||||
logging.info("Performing version check.")
|
||||
logging.info("Current version: %s", current_version)
|
||||
|
||||
data = {"current_version": current_version}
|
||||
|
||||
if Organization.query.first().get_setting("beacon_consent"):
|
||||
data["usage"] = usage_data()
|
||||
|
||||
try:
|
||||
response = requests.post(
|
||||
"https://version.redash.io/api/report?channel=stable",
|
||||
json=data,
|
||||
timeout=3.0,
|
||||
)
|
||||
latest_version = response.json()["release"]["version"]
|
||||
|
||||
_compare_and_update(latest_version)
|
||||
except requests.RequestException:
|
||||
logging.exception("Failed checking for new version.")
|
||||
except (ValueError, KeyError):
|
||||
logging.exception("Failed checking for new version (probably bad/non-JSON response).")
|
||||
|
||||
|
||||
def reset_new_version_status():
|
||||
latest_version = get_latest_version()
|
||||
if latest_version:
|
||||
_compare_and_update(latest_version)
|
||||
|
||||
|
||||
def get_latest_version():
|
||||
return redis_connection.get(REDIS_KEY)
|
||||
|
||||
|
||||
def _compare_and_update(latest_version):
|
||||
# TODO: support alpha channel (allow setting which channel to check & parse build number)
|
||||
is_newer = semver.compare(current_version, latest_version) == -1
|
||||
logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)
|
||||
|
||||
if is_newer:
|
||||
redis_connection.set(REDIS_KEY, latest_version)
|
||||
else:
|
||||
redis_connection.delete(REDIS_KEY)
|
||||
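The deleted redash/version_check.py decided whether a newer release exists via semver.compare, which returns -1, 0 or 1. A small standalone sketch of that comparison (the function name is illustrative; assumes the semver package is installed):

import semver


def is_newer_available(current_version: str, latest_version: str) -> bool:
    # semver.compare returns -1 when the first version is older, 0 when
    # equal and 1 when newer; the deleted module used the same "== -1" test.
    return semver.compare(current_version, latest_version) == -1


assert is_newer_available("10.0.0", "10.1.0") is True
assert is_newer_available("10.1.0", "10.1.0") is False
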
@@ -5,6 +5,7 @@ from unittest import mock
|
||||
from redash.destinations.asana import Asana
|
||||
from redash.destinations.datadog import Datadog
|
||||
from redash.destinations.discord import Discord
|
||||
from redash.destinations.slack import Slack
|
||||
from redash.destinations.webex import Webex
|
||||
from redash.models import Alert, NotificationDestination
|
||||
from tests import BaseTestCase
|
||||
@@ -201,6 +202,59 @@ def test_asana_notify_calls_requests_post():
|
||||
assert mock_response.status_code == 204
|
||||
|
||||
|
||||
def test_slack_notify_calls_requests_post():
|
||||
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
|
||||
alert.id = 1
|
||||
alert.name = "Test Alert"
|
||||
alert.custom_subject = "Test custom subject"
|
||||
alert.custom_body = "Test custom body"
|
||||
|
||||
alert.render_template = mock.Mock(return_value={"Rendered": "template"})
|
||||
query = mock.Mock()
|
||||
query.id = 1
|
||||
|
||||
user = mock.Mock()
|
||||
app = mock.Mock()
|
||||
host = "https://localhost:5000"
|
||||
options = {"url": "https://slack.com/api/api.test"}
|
||||
metadata = {"Scheduled": False}
|
||||
|
||||
new_state = Alert.TRIGGERED_STATE
|
||||
destination = Slack(options)
|
||||
|
||||
with mock.patch("redash.destinations.slack.requests.post") as mock_post:
|
||||
mock_response = mock.Mock()
|
||||
mock_response.status_code = 204
|
||||
mock_post.return_value = mock_response
|
||||
|
||||
destination.notify(alert, query, user, new_state, app, host, metadata, options)
|
||||
|
||||
query_link = f"{host}/queries/{query.id}"
|
||||
alert_link = f"{host}/alerts/{alert.id}"
|
||||
|
||||
expected_payload = {
|
||||
"attachments": [
|
||||
{
|
||||
"text": "Test custom subject",
|
||||
"color": "#c0392b",
|
||||
"fields": [
|
||||
{"title": "Query", "type": "mrkdwn", "value": query_link},
|
||||
{"title": "Alert", "type": "mrkdwn", "value": alert_link},
|
||||
{"title": "Description", "value": "Test custom body"},
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
mock_post.assert_called_once_with(
|
||||
"https://slack.com/api/api.test",
|
||||
data=json.dumps(expected_payload).encode(),
|
||||
timeout=5.0,
|
||||
)
|
||||
|
||||
assert mock_response.status_code == 204
|
||||
|
||||
|
||||
def test_webex_notify_calls_requests_post():
|
||||
alert = mock.Mock(spec_set=["id", "name", "custom_subject", "custom_body", "render_template"])
|
||||
alert.id = 1
|
||||
|
||||
tests/handlers/test_order_results.py (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
from redash import models
|
||||
from redash.handlers.base import order_results
|
||||
from redash.models import db
|
||||
from tests import BaseTestCase
|
||||
|
||||
|
||||
class TestOrderResults(BaseTestCase):
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
|
||||
user1 = self.factory.create_user(name="Charlie")
|
||||
user2 = self.factory.create_user(name="Bravo")
|
||||
user3 = self.factory.create_user(name="Alpha")
|
||||
|
||||
q1 = self.factory.create_query(name="a", user=user1)
|
||||
q2 = self.factory.create_query(name="b", user=user2)
|
||||
q3 = self.factory.create_query(name="c", user=user3)
|
||||
|
||||
db.session.add(user1)
|
||||
db.session.add(user2)
|
||||
db.session.add(user3)
|
||||
|
||||
db.session.add(q1)
|
||||
db.session.add(q2)
|
||||
db.session.add(q3)
|
||||
db.session.commit()
|
||||
|
||||
self.results = db.session.query(models.Query)
|
||||
self.results = self.results.join(models.User, models.Query.user_id == models.User.id)
|
||||
|
||||
self.allowed_orders = {
|
||||
"name": "name",
|
||||
"-name": "-name",
|
||||
"users-name": "users-name",
|
||||
"-users-name": "-users-name",
|
||||
}
|
||||
self.default_order = "-name"
|
||||
|
||||
def test_no_order_no_fallback(self):
|
||||
with self.app.test_request_context("/items?order="):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
|
||||
self.assertEqual(self.results, ordered_results)
|
||||
|
||||
def test_no_order_yes_fallback(self):
|
||||
with self.app.test_request_context("/items?order="):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["c", "b", "a"])
|
||||
|
||||
def test_invalid_order_no_fallback(self):
|
||||
with self.app.test_request_context("/items?order=some_invalid_order"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, [entry.name for entry in self.results])
|
||||
|
||||
def test_invalid_order_yes_fallback(self):
|
||||
with self.app.test_request_context("/items?order=some_invalid_order"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["c", "b", "a"])
|
||||
|
||||
def test_valid_requested_order_no_fallback(self):
|
||||
with self.app.test_request_context("/items?order=name"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["a", "b", "c"])
|
||||
|
||||
def test_valid_requested_order_yes_fallback(self):
|
||||
with self.app.test_request_context("/items?order=name"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["a", "b", "c"])
|
||||
|
||||
def test_requested_entity_no_fallback(self):
|
||||
with self.app.test_request_context("/items?order=users-name"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["c", "b", "a"])
|
||||
|
||||
def test_requested_entity_yes_fallback(self):
|
||||
with self.app.test_request_context("/items?order=-users-name"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=True)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["a", "b", "c"])
|
||||
|
||||
def test_order_by_attached(self):
|
||||
self.results = self.results.order_by(models.Query.name)
|
||||
with self.app.test_request_context("/items?order=-name"):
|
||||
ordered_results = order_results(self.results, self.default_order, self.allowed_orders, fallback=False)
|
||||
ordered_results = [entry.name for entry in ordered_results]
|
||||
self.assertEqual(ordered_results, ["c", "b", "a"])
|
||||
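The new tests sort with keys like "name", "-name" and "-users-name". Per the parse_sort_arg convention in the vendored helper above, a leading separator flips the direction and an entity-attribute pair targets a joined table's column. Below is a standalone re-implementation of just that parsing, for illustration (it is not Redash's code):

def parse_sort_arg(arg, separator="-"):
    # A leading separator flips the direction; "entity-attr" addresses a
    # joined table's column, while a bare "attr" uses the default entity.
    descending = arg.startswith(separator)
    if descending:
        arg = arg[len(separator):]
    parts = arg.split(separator)
    return {
        "entity": parts[0] if len(parts) > 1 else None,
        "attr": parts[1] if len(parts) > 1 else arg,
        "descending": descending,
    }


print(parse_sort_arg("name"))         # {'entity': None, 'attr': 'name', 'descending': False}
print(parse_sort_arg("-users-name"))  # {'entity': 'users', 'attr': 'name', 'descending': True}
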
@@ -1,6 +1,7 @@
"""
Some test cases around the Glue catalog.
"""

from unittest import TestCase

import botocore

@@ -1,4 +1,4 @@
from unittest import TestCase
from unittest import TestCase, mock

from redash.query_runner.elasticsearch2 import (
    ElasticSearch2,

@@ -137,3 +137,14 @@ class TestXPackSQL(TestCase):
            ],
        }
        self.assertDictEqual(XPackSQLElasticSearch._parse_results(None, response), expected)


class TestElasticSearch2(TestCase):
    @mock.patch("redash.query_runner.elasticsearch2.ElasticSearch2.__init__", return_value=None)
    def test_build_query(self, mock_init):
        query_runner = ElasticSearch2()
        query_str = '{"index": "test_index", "result_fields": ["field1", "field2"]}'
        query_dict, url, result_fields = query_runner._build_query(query_str)
        self.assertEqual(query_dict, {})
        self.assertEqual(url, "/test_index/_search")
        self.assertEqual(result_fields, ["field1", "field2"])

@@ -1,6 +1,7 @@
"""
Some test cases for JSON api runner
"""

from unittest import TestCase
from urllib.parse import urlencode, urljoin

@@ -141,7 +141,13 @@ class TestMongoResults(TestCase):
                "column": 2,
                "column2": "test",
                "column3": "hello",
                "nested": {"a": 2, "b": "str2", "c": "c", "d": {"e": 3}},
                "nested": {
                    "a": 2,
                    "b": "str2",
                    "c": "c",
                    "d": {"e": 3},
                    "f": {"h": {"i": ["j", "k", "l"]}},
                },
            },
        ]

@@ -158,6 +164,7 @@ class TestMongoResults(TestCase):
                "nested.b": "str2",
                "nested.c": "c",
                "nested.d.e": 3,
                "nested.f.h.i": ["j", "k", "l"],
            },
        )

@@ -167,3 +174,50 @@ class TestMongoResults(TestCase):
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.a"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.b"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.c"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.d.e"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.f.h.i"))
|
||||
|
||||
def test_parses_flatten_nested_results(self):
|
||||
raw_results = [
|
||||
{
|
||||
"column": 2,
|
||||
"column2": "test",
|
||||
"column3": "hello",
|
||||
"nested": {
|
||||
"a": 2,
|
||||
"b": "str2",
|
||||
"c": "c",
|
||||
"d": {"e": 3},
|
||||
"f": {"h": {"i": ["j", "k", "l"]}},
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
rows, columns = parse_results(raw_results, flatten=True)
|
||||
print(rows)
|
||||
self.assertDictEqual(
|
||||
rows[0],
|
||||
{
|
||||
"column": 2,
|
||||
"column2": "test",
|
||||
"column3": "hello",
|
||||
"nested.a": 2,
|
||||
"nested.b": "str2",
|
||||
"nested.c": "c",
|
||||
"nested.d.e": 3,
|
||||
"nested.f.h.i.0": "j",
|
||||
"nested.f.h.i.1": "k",
|
||||
"nested.f.h.i.2": "l",
|
||||
},
|
||||
)
|
||||
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "column"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "column2"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "column3"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.a"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.b"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.c"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.d.e"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.f.h.i.0"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.f.h.i.1"))
|
||||
self.assertIsNotNone(_get_column_by_name(columns, "nested.f.h.i.2"))
|
||||
|
||||
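The new flatten test above expects nested documents and arrays to become dotted keys such as "nested.f.h.i.0". Here is a hedged, standalone re-implementation of that flattening idea (for illustration only; this is not the parse_results code under test):

def flatten_row(value, prefix="", out=None):
    # Recursively turn nested dicts and lists into a single flat dict whose
    # keys are dotted paths, with list indices as path segments -- the same
    # shape the new test asserts ("nested.f.h.i.0": "j", ...).
    if out is None:
        out = {}
    if isinstance(value, dict):
        for key, item in value.items():
            flatten_row(item, f"{prefix}{key}.", out)
    elif isinstance(value, list):
        for index, item in enumerate(value):
            flatten_row(item, f"{prefix}{index}.", out)
    else:
        out[prefix[:-1]] = value  # drop the trailing "."
    return out


row = {"column": 2, "nested": {"d": {"e": 3}, "f": {"h": {"i": ["j", "k", "l"]}}}}
print(flatten_row(row))
# {'column': 2, 'nested.d.e': 3, 'nested.f.h.i.0': 'j', 'nested.f.h.i.1': 'k', 'nested.f.h.i.2': 'l'}
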
@@ -1,3 +1,5 @@
import datetime
import decimal
import sqlite3
from unittest import TestCase

@@ -107,6 +109,16 @@ class TestCreateTable(TestCase):
        create_table(connection, table_name, results)
        connection.execute("SELECT 1 FROM query_123")

    def test_creates_table_with_decimal_and_timedelta_in_column_value(self):
        connection = sqlite3.connect(":memory:")
        results = {
            "columns": [{"name": "test1"}, {"name": "test2"}, {"name": "test3"}],
            "rows": [{"test1": 1, "test2": decimal.Decimal(2), "test3": datetime.timedelta(seconds=3)}],
        }
        table_name = "query_123"
        create_table(connection, table_name, results)
        connection.execute("SELECT 1 FROM query_123")

    def test_shows_meaningful_error_on_failure_to_create_table(self):
        connection = sqlite3.connect(":memory:")
        results = {"columns": [], "rows": []}

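The added test stores decimal.Decimal and datetime.timedelta values through create_table; sqlite3 cannot bind those types natively, so they must be converted somewhere along the way. One possible approach is sketched below using sqlite3 adapters (illustration only; create_table itself may convert the values differently):

import datetime
import decimal
import sqlite3

# Teach sqlite3 how to store the two types the new test exercises.
sqlite3.register_adapter(decimal.Decimal, float)
sqlite3.register_adapter(datetime.timedelta, lambda td: td.total_seconds())

connection = sqlite3.connect(":memory:")
connection.execute("CREATE TABLE query_123 (test1, test2, test3)")
connection.execute(
    "INSERT INTO query_123 VALUES (?, ?, ?)",
    (1, decimal.Decimal(2), datetime.timedelta(seconds=3)),
)
print(connection.execute("SELECT * FROM query_123").fetchone())  # (1, 2.0, 3.0)
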
@@ -1,6 +1,7 @@
"""
Some test cases for Trino.
"""

from unittest import TestCase
from unittest.mock import patch

@@ -28,19 +28,19 @@
|
||||
"series": [
|
||||
{
|
||||
"visible": true,
|
||||
"values": [10, 60, 100, 30],
|
||||
"labels": ["Slice 0", "Slice 0", "Slice 0", "Slice 0"],
|
||||
"values": [200],
|
||||
"labels": ["Slice 0"],
|
||||
"type": "pie",
|
||||
"hole": 0.4,
|
||||
"marker": {
|
||||
"colors": ["#356AFF", "#E92828", "#3BD973", "#604FE9"]
|
||||
"colors": ["#356AFF"]
|
||||
},
|
||||
"hoverinfo": "text+label",
|
||||
"hover": [],
|
||||
"text": ["15% (30)", "15% (30)", "15% (30)", "15% (30)"],
|
||||
"text": ["100% (200)"],
|
||||
"textinfo": "percent",
|
||||
"textposition": "inside",
|
||||
"textfont": { "color": ["#ffffff", "#ffffff", "#333333", "#ffffff"] },
|
||||
"textfont": { "color": ["#ffffff"] },
|
||||
"name": "a",
|
||||
"direction": "counterclockwise",
|
||||
"domain": { "x": [0, 0.98], "y": [0, 0.9] }
|
||||
|
||||
@@ -91,27 +91,36 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
|
||||
};
|
||||
|
||||
const sourceData = new Map();
|
||||
const xValues: any = [];
|
||||
const yValues: any = [];
|
||||
|
||||
const labelsValuesMap = new Map();
|
||||
|
||||
const yErrorValues: any = [];
|
||||
each(data, row => {
|
||||
const x = normalizeValue(row.x, options.xAxis.type); // number/datetime/category
|
||||
const y = cleanYValue(row.y, seriesYAxis === "y2" ? options.yAxis[1].type : options.yAxis[0].type); // depends on series type!
|
||||
const yError = cleanNumber(row.yError); // always number
|
||||
const size = cleanNumber(row.size); // always number
|
||||
if (labelsValuesMap.has(x)) {
|
||||
labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
|
||||
} else {
|
||||
labelsValuesMap.set(x, y);
|
||||
}
|
||||
const aggregatedY = labelsValuesMap.get(x);
|
||||
|
||||
sourceData.set(x, {
|
||||
x,
|
||||
y,
|
||||
y: aggregatedY,
|
||||
yError,
|
||||
size,
|
||||
yPercent: null, // will be updated later
|
||||
row,
|
||||
});
|
||||
xValues.push(x);
|
||||
yValues.push(y);
|
||||
yErrorValues.push(yError);
|
||||
});
|
||||
|
||||
const xValues = Array.from(labelsValuesMap.keys());
|
||||
const yValues = Array.from(labelsValuesMap.values());
|
||||
|
||||
const plotlySeries = {
|
||||
visible: true,
|
||||
hoverinfo: hoverInfoPattern,
|
||||
|
||||
@@ -41,8 +41,8 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
|
||||
const xPosition = (index % cellsInRow) * cellWidth;
|
||||
const yPosition = Math.floor(index / cellsInRow) * cellHeight;
|
||||
|
||||
const labels: any = [];
|
||||
const values: any = [];
|
||||
const labelsValuesMap = new Map();
|
||||
|
||||
const sourceData = new Map();
|
||||
const seriesTotal = reduce(
|
||||
series.data,
|
||||
@@ -55,19 +55,29 @@ function prepareSeries(series: any, options: any, additionalOptions: any) {
|
||||
each(series.data, row => {
|
||||
const x = hasX ? normalizeValue(row.x, options.xAxis.type) : `Slice ${index}`;
|
||||
const y = cleanNumber(row.y);
|
||||
labels.push(x);
|
||||
values.push(y);
|
||||
|
||||
if (labelsValuesMap.has(x)) {
|
||||
labelsValuesMap.set(x, labelsValuesMap.get(x) + y);
|
||||
} else {
|
||||
labelsValuesMap.set(x, y);
|
||||
}
|
||||
const aggregatedY = labelsValuesMap.get(x);
|
||||
|
||||
|
||||
sourceData.set(x, {
|
||||
x,
|
||||
y,
|
||||
yPercent: (y / seriesTotal) * 100,
|
||||
y: aggregatedY,
|
||||
yPercent: (aggregatedY / seriesTotal) * 100,
|
||||
row,
|
||||
});
|
||||
});
|
||||
|
||||
const markerColors = map(series.data, row => getValueColor(row.x));
|
||||
const markerColors = map(Array.from(sourceData.values()), data => getValueColor(data.row.x));
|
||||
const textColors = map(markerColors, c => chooseTextColorForBackground(c));
|
||||
|
||||
const labels = Array.from(labelsValuesMap.keys());
|
||||
const values = Array.from(labelsValuesMap.values());
|
||||
|
||||
return {
|
||||
visible: true,
|
||||
values,
|
||||
|
||||
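Both viz-lib hunks above switch to accumulating duplicate labels in a Map before building the Plotly series, which is why the updated spec expects a single 200-valued slice instead of four "Slice 0" entries. The same aggregation expressed as a short Python sketch (illustration only; the real change is the TypeScript above):

from collections import OrderedDict

rows = [
    {"x": "Slice 0", "y": 10},
    {"x": "Slice 0", "y": 60},
    {"x": "Slice 0", "y": 100},
    {"x": "Slice 0", "y": 30},
]

# Sum y per label while preserving first-seen order, like the Map in the hunk.
labels_values = OrderedDict()
for row in rows:
    labels_values[row["x"]] = labels_values.get(row["x"], 0) + row["y"]

labels = list(labels_values.keys())    # ["Slice 0"]
values = list(labels_values.values())  # [200]
print(labels, values)
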
@@ -4263,16 +4263,17 @@ es-to-primitive@^1.2.1:
|
||||
is-date-object "^1.0.1"
|
||||
is-symbol "^1.0.2"
|
||||
|
||||
es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50:
|
||||
version "0.10.53"
|
||||
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1"
|
||||
integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==
|
||||
es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.62, es5-ext@~0.10.14:
|
||||
version "0.10.63"
|
||||
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.63.tgz#9c222a63b6a332ac80b1e373b426af723b895bd6"
|
||||
integrity sha512-hUCZd2Byj/mNKjfP9jXrdVZ62B8KuA/VoK7X8nUh5qT+AxDmcbvZz041oDVZdbIN1qW6XY9VDNwzkvKnZvK2TQ==
|
||||
dependencies:
|
||||
es6-iterator "~2.0.3"
|
||||
es6-symbol "~3.1.3"
|
||||
next-tick "~1.0.0"
|
||||
es6-iterator "^2.0.3"
|
||||
es6-symbol "^3.1.3"
|
||||
esniff "^2.0.1"
|
||||
next-tick "^1.1.0"
|
||||
|
||||
es6-iterator@^2.0.3, es6-iterator@~2.0.3:
|
||||
es6-iterator@^2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
|
||||
integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c=
|
||||
@@ -4286,7 +4287,7 @@ es6-promise@^4.2.8:
|
||||
resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a"
|
||||
integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==
|
||||
|
||||
es6-symbol@^3.1.1, es6-symbol@~3.1.3:
|
||||
es6-symbol@^3.1.1, es6-symbol@^3.1.3:
|
||||
version "3.1.3"
|
||||
resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
|
||||
integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
|
||||
@@ -4419,6 +4420,16 @@ eslint@^7.14.0:
|
||||
text-table "^0.2.0"
|
||||
v8-compile-cache "^2.0.3"
|
||||
|
||||
esniff@^2.0.1:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308"
|
||||
integrity sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==
|
||||
dependencies:
|
||||
d "^1.0.1"
|
||||
es5-ext "^0.10.62"
|
||||
event-emitter "^0.3.5"
|
||||
type "^2.7.2"
|
||||
|
||||
espree@^7.3.0, espree@^7.3.1:
|
||||
version "7.3.1"
|
||||
resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6"
|
||||
@@ -4462,6 +4473,14 @@ esutils@^2.0.2:
|
||||
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
|
||||
integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
|
||||
|
||||
event-emitter@^0.3.5:
|
||||
version "0.3.5"
|
||||
resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39"
|
||||
integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==
|
||||
dependencies:
|
||||
d "1"
|
||||
es5-ext "~0.10.14"
|
||||
|
||||
events@^1.0.2:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
|
||||
@@ -4740,9 +4759,9 @@ flow-parser@0.*:
|
||||
integrity sha512-yEh5wJIi/BG7JZvWAWxTdfZ1uoh/W3oefJOuvUDosKFIE5H7ad8Eu6k9ba1yr83Lxn5vY6kZMG1IaiiiA/5scg==
|
||||
|
||||
follow-redirects@^1.15.0:
|
||||
version "1.15.5"
|
||||
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020"
|
||||
integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==
|
||||
version "1.15.6"
|
||||
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
|
||||
integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
|
||||
|
||||
font-atlas@^2.1.0:
|
||||
version "2.1.0"
|
||||
@@ -7500,10 +7519,10 @@ neo-async@^2.6.2:
|
||||
resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
|
||||
integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
|
||||
|
||||
next-tick@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c"
|
||||
integrity sha1-yobR/ogoFpsBICCOPchCS524NCw=
|
||||
next-tick@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb"
|
||||
integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==
|
||||
|
||||
nextafter@^1.0.0:
|
||||
version "1.0.0"
|
||||
@@ -10136,6 +10155,11 @@ type@^2.0.0:
|
||||
resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3"
|
||||
integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow==
|
||||
|
||||
type@^2.7.2:
|
||||
version "2.7.2"
|
||||
resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0"
|
||||
integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==
|
||||
|
||||
typed-array-buffer@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60"
|
||||
|
||||
yarn.lock (83 changed lines)
@@ -3541,7 +3541,25 @@ bn.js@^5.0.0, bn.js@^5.2.1:
|
||||
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70"
|
||||
integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ==
|
||||
|
||||
body-parser@1.20.1, body-parser@^1.18.3:
|
||||
body-parser@1.20.2:
|
||||
version "1.20.2"
|
||||
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
|
||||
integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
|
||||
dependencies:
|
||||
bytes "3.1.2"
|
||||
content-type "~1.0.5"
|
||||
debug "2.6.9"
|
||||
depd "2.0.0"
|
||||
destroy "1.2.0"
|
||||
http-errors "2.0.0"
|
||||
iconv-lite "0.4.24"
|
||||
on-finished "2.4.1"
|
||||
qs "6.11.0"
|
||||
raw-body "2.5.2"
|
||||
type-is "~1.6.18"
|
||||
unpipe "1.0.0"
|
||||
|
||||
body-parser@^1.18.3:
|
||||
version "1.20.1"
|
||||
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668"
|
||||
integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==
|
||||
@@ -4528,6 +4546,11 @@ content-type@~1.0.4:
|
||||
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
|
||||
integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==
|
||||
|
||||
content-type@~1.0.5:
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918"
|
||||
integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
|
||||
|
||||
convert-source-map@^1.1.0:
|
||||
version "1.6.0"
|
||||
resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.6.0.tgz#51b537a8c43e0f04dec1993bffcdd504e758ac20"
|
||||
@@ -4554,10 +4577,10 @@ cookie-signature@1.0.6:
|
||||
resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
|
||||
integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
|
||||
|
||||
cookie@0.5.0:
|
||||
version "0.5.0"
|
||||
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
|
||||
integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
|
||||
cookie@0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
|
||||
integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
|
||||
|
||||
copy-anything@^2.0.1:
|
||||
version "2.0.6"
|
||||
@@ -6320,16 +6343,16 @@ expect@^24.9.0:
|
||||
jest-regex-util "^24.9.0"
|
||||
|
||||
express@^4.16.3, express@^4.17.3:
|
||||
version "4.18.2"
|
||||
resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59"
|
||||
integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==
|
||||
version "4.19.2"
|
||||
resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
|
||||
integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
|
||||
dependencies:
|
||||
accepts "~1.3.8"
|
||||
array-flatten "1.1.1"
|
||||
body-parser "1.20.1"
|
||||
body-parser "1.20.2"
|
||||
content-disposition "0.5.4"
|
||||
content-type "~1.0.4"
|
||||
cookie "0.5.0"
|
||||
cookie "0.6.0"
|
||||
cookie-signature "1.0.6"
|
||||
debug "2.6.9"
|
||||
depd "2.0.0"
|
||||
@@ -11984,6 +12007,16 @@ raw-body@2.5.1:
|
||||
iconv-lite "0.4.24"
|
||||
unpipe "1.0.0"
|
||||
|
||||
raw-body@2.5.2:
|
||||
version "2.5.2"
|
||||
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
|
||||
integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
|
||||
dependencies:
|
||||
bytes "3.1.2"
|
||||
http-errors "2.0.0"
|
||||
iconv-lite "0.4.24"
|
||||
unpipe "1.0.0"
|
||||
|
||||
raw-loader@^0.5.1:
|
||||
version "0.5.1"
|
||||
resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-0.5.1.tgz#0c3d0beaed8a01c966d9787bf778281252a979aa"
|
||||
@@ -14392,9 +14425,9 @@ tar-stream@^2.1.4:
|
||||
readable-stream "^3.1.1"
|
||||
|
||||
tar@^6.0.2:
|
||||
version "6.1.15"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
|
||||
integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
|
||||
version "6.2.1"
|
||||
resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"
|
||||
integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==
|
||||
dependencies:
|
||||
chownr "^2.0.0"
|
||||
fs-minipass "^2.0.0"
|
||||
@@ -15320,9 +15353,9 @@ webpack-cli@^4.10.0:
|
||||
webpack-merge "^5.7.3"
|
||||
|
||||
webpack-dev-middleware@^5.3.1:
|
||||
version "5.3.3"
|
||||
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f"
|
||||
integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==
|
||||
version "5.3.4"
|
||||
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517"
|
||||
integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==
|
||||
dependencies:
|
||||
colorette "^2.0.10"
|
||||
memfs "^3.4.3"
|
||||
@@ -15667,21 +15700,21 @@ write@1.0.3:
|
||||
mkdirp "^0.5.1"
|
||||
|
||||
ws@^5.2.0:
|
||||
version "5.2.3"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.3.tgz#05541053414921bc29c63bee14b8b0dd50b07b3d"
|
||||
integrity sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA==
|
||||
version "5.2.4"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.4.tgz#c7bea9f1cfb5f410de50e70e82662e562113f9a7"
|
||||
integrity sha512-fFCejsuC8f9kOSu9FYaOw8CdO68O3h5v0lg4p74o8JqWpwTf9tniOD+nOB78aWoVSS6WptVUmDrp/KPsMVBWFQ==
|
||||
dependencies:
|
||||
async-limiter "~1.0.0"
|
||||
|
||||
ws@^7.2.3, ws@^7.3.1:
|
||||
version "7.5.9"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
|
||||
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
|
||||
version "7.5.10"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
|
||||
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
|
||||
|
||||
ws@^8.13.0:
|
||||
version "8.13.0"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
|
||||
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
|
||||
version "8.17.1"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
|
||||
integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
|
||||
|
||||
xml-name-validator@^3.0.0:
|
||||
version "3.0.0"
|
||||