diff --git a/.circleci/config.yml b/.circleci/config.yml
index 3e0e99a101..d214ebfcc1 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,18 +1,13 @@
 version: 2.0
-flake8-steps: &steps
-  - checkout
-  - run: sudo pip install flake8
-  - run: ./bin/flake8_tests.sh
 jobs:
   python-flake8-tests:
     docker:
       - image: circleci/python:3.7.0
-    steps: *steps
-  legacy-python-flake8-tests:
-    docker:
-      - image: circleci/python:2.7.15
-    steps: *steps
+    steps:
+      - checkout
+      - run: sudo pip install flake8
+      - run: ./bin/flake8_tests.sh
   backend-unit-tests:
     environment:
       COMPOSE_FILE: .circleci/docker-compose.circle.yml
@@ -44,6 +39,7 @@ jobs:
             mkdir -p /tmp/test-results/unit-tests
             docker cp tests:/app/coverage.xml ./coverage.xml
            docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
+          when: always
      - store_test_results:
          path: /tmp/test-results
      - store_artifacts:
@@ -63,8 +59,8 @@ jobs:
      - image: circleci/node:8
    steps:
      - checkout
-      - run: sudo apt install python-pip
-      - run: sudo pip install -r requirements_bundles.txt
+      - run: sudo apt install python3-pip
+      - run: sudo pip3 install -r requirements_bundles.txt
      - run: npm install
      - run: npm run bundle
      - run: npm test
@@ -99,8 +95,8 @@ jobs:
    steps:
      - setup_remote_docker
      - checkout
-      - run: sudo apt install python-pip
-      - run: sudo pip install -r requirements_bundles.txt
+      - run: sudo apt install python3-pip
+      - run: sudo pip3 install -r requirements_bundles.txt
      - run: .circleci/update_version
      - run: npm run bundle
      - run: .circleci/docker_build
@@ -109,7 +105,6 @@ workflows:
  build:
    jobs:
      - python-flake8-tests
-      - legacy-python-flake8-tests
      - backend-unit-tests
      - frontend-lint
      - frontend-unit-tests:
diff --git a/Dockerfile b/Dockerfile
index b9fc56431a..61ec64a557 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,11 +8,40 @@ COPY client /frontend/client
 COPY webpack.config.js /frontend/
 RUN npm run build
 
-FROM redash/base:debian
+FROM python:3.7-slim
+
+EXPOSE 5000
 
 # Controls whether to install extra dependencies needed for all data sources.
 ARG skip_ds_deps
 
+RUN useradd --create-home redash
+
+# Ubuntu packages
+RUN apt-get update && \
+  apt-get install -y \
+    curl \
+    gnupg \
+    build-essential \
+    pwgen \
+    libffi-dev \
+    sudo \
+    git-core \
+    wget \
+    # Postgres client
+    libpq-dev \
+    # for SAML
+    xmlsec1 \
+    # Additional packages required for data sources:
+    libssl-dev \
+    default-libmysqlclient-dev \
+    freetds-dev \
+    libsasl2-dev && \
+  apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
 # We first copy only the requirements file, to avoid rebuilding on every file
 # change.
 COPY requirements.txt requirements_bundles.txt requirements_dev.txt requirements_all_ds.txt ./
diff --git a/bin/bundle-extensions b/bin/bundle-extensions
index fb44324c62..ffa65851ce 100755
--- a/bin/bundle-extensions
+++ b/bin/bundle-extensions
@@ -1,9 +1,8 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
 """Copy bundle extension files to the client/app/extension directory"""
 import logging
 import os
-from pathlib2 import Path
+from pathlib import Path
 from shutil import copy
 from collections import OrderedDict as odict
diff --git a/bin/get_changes.py b/bin/get_changes.py
index 6d98d8672b..60091bb772 100644
--- a/bin/get_changes.py
+++ b/bin/get_changes.py
@@ -1,9 +1,10 @@
-#!/bin/env python
-from __future__ import print_function
+#!/bin/env python3
+
 import sys
 import re
 import subprocess
 
+
 def get_change_log(previous_sha):
     args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', 'master...{}'.format(previous_sha)]
     log = subprocess.check_output(args)
@@ -33,4 +34,4 @@ def get_change_log(previous_sha):
     changes = get_change_log(previous_sha)
 
     for change in changes:
-        print(change)
\ No newline at end of file
+        print(change)
diff --git a/bin/release_manager.py b/bin/release_manager.py
index df00169da7..3d9b21c895 100644
--- a/bin/release_manager.py
+++ b/bin/release_manager.py
@@ -1,4 +1,4 @@
-from __future__ import print_function
+#!/usr/bin/env python3
 import os
 import sys
 import re
diff --git a/bin/upgrade b/bin/upgrade
index 9a595cf977..376866f1ed 100755
--- a/bin/upgrade
+++ b/bin/upgrade
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 import urllib
 import argparse
 import os
@@ -27,7 +27,7 @@ def run(cmd, cwd=None):
 
 
 def confirm(question):
-    reply = str(raw_input(question + ' (y/n): ')).lower().strip()
+    reply = str(input(question + ' (y/n): ')).lower().strip()
 
     if reply[0] == 'y':
         return True
diff --git a/migrations/versions/65fc9ede4746_add_is_draft_status_to_queries_and_.py b/migrations/versions/65fc9ede4746_add_is_draft_status_to_queries_and_.py
index cee4581ec4..fe0ae00082 100644
--- a/migrations/versions/65fc9ede4746_add_is_draft_status_to_queries_and_.py
+++ b/migrations/versions/65fc9ede4746_add_is_draft_status_to_queries_and_.py
@@ -1,11 +1,11 @@
 """Add is_draft status to queries and dashboards
 
 Revision ID: 65fc9ede4746
-Revises: 
+Revises:
 Create Date: 2016-12-07 18:08:13.395586
 
 """
-from __future__ import print_function
+
 from alembic import op
 import sqlalchemy as sa
 
@@ -26,7 +26,7 @@ def upgrade():
         op.execute("UPDATE dashboards SET is_draft = false")
     except ProgrammingError as e:
         # The columns might exist if you ran the old migrations.
-        if 'column "is_draft" of relation "queries" already exists' in e.message:
+        if 'column "is_draft" of relation "queries" already exists' in str(e):
             print("Can't run this migration as you already have is_draft columns, please run:")
             print("./manage.py db stamp {} # you might need to alter the command to match your environment.".format(revision))
             exit()
diff --git a/migrations/versions/969126bd800f_.py b/migrations/versions/969126bd800f_.py
index 4a476ef956..a62bd840d3 100644
--- a/migrations/versions/969126bd800f_.py
+++ b/migrations/versions/969126bd800f_.py
@@ -5,7 +5,7 @@
 Create Date: 2018-01-31 15:20:30.396533
 
 """
-from __future__ import print_function
+
 import simplejson
 from alembic import op
 import sqlalchemy as sa
diff --git a/redash/__init__.py b/redash/__init__.py
index 68be062962..87ac101c34 100644
--- a/redash/__init__.py
+++ b/redash/__init__.py
@@ -2,8 +2,6 @@
 import logging
 import os
 import sys
-import urllib
-import urlparse
 
 import redis
 from flask_mail import Mail
@@ -41,6 +39,7 @@ def setup_logging():
 setup_logging()
 
 redis_connection = redis.from_url(settings.REDIS_URL)
+rq_redis_connection = redis.from_url(settings.RQ_REDIS_URL)
 mail = Mail()
 migrate = Migrate()
 statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
diff --git a/redash/app.py b/redash/app.py
index e4e1335561..dcb8e6d9a9 100644
--- a/redash/app.py
+++ b/redash/app.py
@@ -10,7 +10,7 @@ def __init__(self, *args, **kwargs):
         kwargs.update({
             'template_folder': settings.STATIC_ASSETS_PATH,
             'static_folder': settings.STATIC_ASSETS_PATH,
-            'static_path': '/static',
+            'static_url_path': '/static',
         })
         super(Redash, self).__init__(__name__, *args, **kwargs)
         # Make sure we get the right referral address even behind proxies like nginx.
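# Note on the redash/app.py hunk above: Flask 1.0 removed the old
# `static_path` constructor argument in favor of `static_url_path`, which is
# why the kwarg is renamed here. A minimal standalone sketch of the new
# keyword (illustrative only, not Redash code):
from flask import Flask

app = Flask(
    __name__,
    static_folder="static",      # directory the files live in on disk
    static_url_path="/static",   # URL prefix they are served under
)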
diff --git a/redash/authentication/__init__.py b/redash/authentication/__init__.py
index 989ed52b11..d0058694d0 100644
--- a/redash/authentication/__init__.py
+++ b/redash/authentication/__init__.py
@@ -2,7 +2,7 @@
 import hmac
 import logging
 import time
-from urlparse import urlsplit, urlunsplit
+from urllib.parse import urlsplit, urlunsplit
 
 from flask import jsonify, redirect, request, url_for
 from flask_login import LoginManager, login_user, logout_user, user_logged_in
@@ -33,8 +33,8 @@ def sign(key, path, expires):
     if not key:
         return None
 
-    h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
-    h.update(str(expires))
+    h = hmac.new(key.encode(), msg=path.encode(), digestmod=hashlib.sha1)
+    h.update(str(expires).encode())
 
     return h.hexdigest()
 
@@ -93,7 +93,7 @@ def hmac_load_user_from_request(request):
         calculated_signature = sign(query.api_key, request.path, expires)
 
         if query.api_key and signature == calculated_signature:
-            return models.ApiUser(query.api_key, query.org, query.groups.keys(), name="ApiKey: Query {}".format(query.id))
+            return models.ApiUser(query.api_key, query.org, list(query.groups.keys()), name="ApiKey: Query {}".format(query.id))
 
     return None
 
@@ -118,7 +118,7 @@ def get_user_from_api_key(api_key, query_id):
     if query_id:
         query = models.Query.get_by_id_and_org(query_id, org)
         if query and query.api_key == api_key:
-            user = models.ApiUser(api_key, query.org, query.groups.keys(), name="ApiKey: Query {}".format(query.id))
+            user = models.ApiUser(api_key, query.org, list(query.groups.keys()), name="ApiKey: Query {}".format(query.id))
 
     return user
 
@@ -271,4 +271,10 @@ def get_next_path(unsafe_next_path):
     parts[1] = ''  # clear netloc
     safe_next_path = urlunsplit(parts)
 
+    # If the original path was a URL, we might end up with an empty
+    # safe url, which will redirect to the login page. Changing to
+    # relative root to redirect to the app root after login.
+    if not safe_next_path:
+        safe_next_path = './'
+
     return safe_next_path
diff --git a/redash/authentication/account.py b/redash/authentication/account.py
index 16bd90c811..c20b60aab2 100644
--- a/redash/authentication/account.py
+++ b/redash/authentication/account.py
@@ -48,7 +48,7 @@ def send_verify_email(user, org):
     }
 
     html_content = render_template('emails/verify.html', **context)
     text_content = render_template('emails/verify.txt', **context)
-    subject = u"{}, please verify your email address".format(user.name)
+    subject = "{}, please verify your email address".format(user.name)
 
     send_mail.delay([user.email], subject, html_content, text_content)
 
@@ -57,7 +57,7 @@ def send_invite_email(inviter, invited, invite_url, org):
     context = dict(inviter=inviter, invited=invited, org=org, invite_url=invite_url)
     html_content = render_template('emails/invite.html', **context)
     text_content = render_template('emails/invite.txt', **context)
-    subject = u"{} invited you to join Redash".format(inviter.name)
+    subject = "{} invited you to join Redash".format(inviter.name)
 
     send_mail.delay([invited.email], subject, html_content, text_content)
 
@@ -67,7 +67,7 @@ def send_password_reset_email(user):
     context = dict(user=user, reset_link=reset_link)
     html_content = render_template('emails/reset.html', **context)
     text_content = render_template('emails/reset.txt', **context)
-    subject = u"Reset your password"
+    subject = "Reset your password"
 
     send_mail.delay([user.email], subject, html_content, text_content)
     return reset_link
@@ -76,6 +76,6 @@ def send_password_reset_email(user):
 def send_user_disabled_email(user):
     html_content = render_template('emails/reset_disabled.html', user=user)
     text_content = render_template('emails/reset_disabled.txt', user=user)
-    subject = u"Your Redash account is disabled"
+    subject = "Your Redash account is disabled"
 
     send_mail.delay([user.email], subject, html_content, text_content)
diff --git a/redash/cli/__init__.py b/redash/cli/__init__.py
index 229f80ea5b..d3bc522f8a 100644
--- a/redash/cli/__init__.py
+++ b/redash/cli/__init__.py
@@ -1,4 +1,4 @@
-from __future__ import print_function
+
 import click
 import simplejson
 
@@ -53,7 +53,7 @@ def status():
 @manager.command()
 def check_settings():
     """Show the settings as Redash sees them (useful for debugging)."""
-    for name, item in current_app.config.iteritems():
+    for name, item in current_app.config.items():
         print("{} = {}".format(name, item))
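# The cli/__init__.py hunk above shows the single most common change in this
# patch: Python 3 removed dict.iteritems()/iterkeys()/itervalues(), and
# .items()/.keys()/.values() now return lazy view objects. A tiny sketch of
# the pattern (sample dict, not Redash code):
config = {"DEBUG": False, "NAME": "redash"}

for name, item in config.items():  # valid on both Python 2 and 3
    print("{} = {}".format(name, item))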
{}".format(i + 1, query_runner_name)) idx = 0 - while idx < 1 or idx > len(query_runners.keys()): + while idx < 1 or idx > len(list(query_runners.keys())): idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int) - type = query_runners.keys()[idx - 1] + type = list(query_runners.keys())[idx - 1] else: validate_data_source_type(type) @@ -119,7 +119,7 @@ def new(name=None, type=None, options=None, organization='default'): options_obj = {} - for k, prop in schema['properties'].iteritems(): + for k, prop in schema['properties'].items(): required = k in schema.get('required', []) default_value = "<>" if required: diff --git a/redash/cli/groups.py b/redash/cli/groups.py index 4e2b353c0e..195d3c466a 100644 --- a/redash/cli/groups.py +++ b/redash/cli/groups.py @@ -1,4 +1,4 @@ -from __future__ import print_function + from sys import exit from sqlalchemy.orm.exc import NoResultFound @@ -36,7 +36,7 @@ def create(name, permissions=None, organization='default'): permissions=permissions)) models.db.session.commit() except Exception as e: - print("Failed create group: %s" % e.message) + print("Failed create group: %s" % e) exit(1) @@ -67,7 +67,7 @@ def change_permissions(group_id, permissions=None): models.db.session.add(group) models.db.session.commit() except Exception as e: - print("Failed change permission: %s" % e.message) + print("Failed change permission: %s" % e) exit(1) @@ -80,10 +80,10 @@ def extract_permissions_string(permissions): return permissions -@manager.command() +@manager.command(name='list') @option('--org', 'organization', default=None, help="The organization to limit to (leave blank for all).") -def list(organization=None): +def list_command(organization=None): """List all groups""" if organization: org = models.Organization.get_by_slug(organization) diff --git a/redash/cli/organization.py b/redash/cli/organization.py index 6a863e18c5..ad45a3fc38 100644 --- a/redash/cli/organization.py +++ b/redash/cli/organization.py @@ -1,4 +1,4 @@ -from __future__ import print_function + from click import argument from flask.cli import AppGroup @@ -29,8 +29,8 @@ def show_google_apps_domains(): ', '.join(organization.google_apps_domains))) -@manager.command() -def list(): +@manager.command(name='list') +def list_command(): """List all organizations""" orgs = models.Organization.query for i, org in enumerate(orgs.order_by(models.Organization.name)): diff --git a/redash/cli/rq.py b/redash/cli/rq.py index 76e5544f2d..357f7cfd2d 100644 --- a/redash/cli/rq.py +++ b/redash/cli/rq.py @@ -7,7 +7,7 @@ from flask.cli import AppGroup from rq import Connection, Worker -from redash import redis_connection +from redash import rq_redis_connection from redash.schedule import rq_scheduler, schedule_periodic_jobs manager = AppGroup(help="RQ management commands.") @@ -22,7 +22,9 @@ def scheduler(): @manager.command() @argument('queues', nargs=-1) def worker(queues='default'): - with Connection(redis_connection): + if not queues: + queues = ('default',) + with Connection(rq_redis_connection): w = Worker(queues) w.work() @@ -30,7 +32,7 @@ def worker(queues='default'): @manager.command() def healthcheck(): hostname = socket.gethostname() - with Connection(redis_connection): + with Connection(rq_redis_connection): all_workers = Worker.all() local_workers = [w for w in all_workers if w.hostname == hostname] diff --git a/redash/cli/users.py b/redash/cli/users.py index 0f6679eb26..72b6a5544e 100644 --- a/redash/cli/users.py +++ b/redash/cli/users.py @@ -1,4 +1,4 @@ -from __future__ import print_function 
diff --git a/redash/cli/users.py b/redash/cli/users.py
index 0f6679eb26..72b6a5544e 100644
--- a/redash/cli/users.py
+++ b/redash/cli/users.py
@@ -1,4 +1,4 @@
-from __future__ import print_function
+
 from sys import exit
 
 from click import BOOL, argument, option, prompt
@@ -93,7 +93,7 @@ def create(email, name, groups, is_admin=False, google_auth=False,
         models.db.session.add(user)
         models.db.session.commit()
     except Exception as e:
-        print("Failed creating user: %s" % e.message)
+        print("Failed creating user: %s" % e)
         exit(1)
 
@@ -141,7 +141,7 @@ def create_root(email, name, google_auth=False, password=None, organization='default'):
         models.db.session.add(user)
         models.db.session.commit()
     except Exception as e:
-        print("Failed creating root user: %s" % e.message)
+        print("Failed creating root user: %s" % e)
         exit(1)
 
@@ -222,7 +222,7 @@ def invite(email, name, inviter_email, groups, is_admin=False,
             invite_user(org, user_from, user)
             print("An invitation was sent to [%s] at [%s]." % (name, email))
         except IntegrityError as e:
-            if "email" in e.message:
+            if "email" in str(e):
                 print("Cannot invite. User already exists [%s]" % email)
             else:
                 print(e)
@@ -230,11 +230,11 @@ def invite(email, name, inviter_email, groups, is_admin=False,
         print("The inviter [%s] was not found." % inviter_email)
 
 
-@manager.command()
+@manager.command(name='list')
 @option('--org', 'organization', default=None,
         help="The organization the user belongs to (leave blank for all"
         " organizations)")
-def list(organization=None):
+def list_command(organization=None):
     """List all users"""
     if organization:
         org = models.Organization.get_by_slug(organization)
@@ -246,7 +246,7 @@ def list(organization=None):
         print("-" * 20)
         print("Id: {}\nName: {}\nEmail: {}\nOrganization: {}\nActive: {}".format(
-            user.id, user.name.encode('utf-8'), user.email, user.org.name, not(user.is_disabled)))
+            user.id, user.name, user.email, user.org.name, not(user.is_disabled)))
 
         groups = models.Group.query.filter(models.Group.id.in_(user.group_ids)).all()
         group_names = [group.name for group in groups]
diff --git a/redash/destinations/chatwork.py b/redash/destinations/chatwork.py
index d80379998c..24522a0ce4 100644
--- a/redash/destinations/chatwork.py
+++ b/redash/destinations/chatwork.py
@@ -5,7 +5,7 @@
 
 class ChatWork(BaseDestination):
-    ALERTS_DEFAULT_MESSAGE_TEMPLATE = u'{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}'
+    ALERTS_DEFAULT_MESSAGE_TEMPLATE = '{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}'
 
     @classmethod
     def configuration_schema(cls):
diff --git a/redash/destinations/hipchat.py b/redash/destinations/hipchat.py
index 41b61de1fa..8d41e97d9a 100644
--- a/redash/destinations/hipchat.py
+++ b/redash/destinations/hipchat.py
@@ -37,7 +37,7 @@ def notify(self, alert, query, user, new_state, app, host, options):
             alert_url = '{host}/alerts/{alert_id}'.format(host=host, alert_id=alert.id)
             query_url = '{host}/queries/{query_id}'.format(host=host, query_id=query.id)
 
-            message = u'{alert_name} changed state to {new_state} (based on this query).'.format(
+            message = '{alert_name} changed state to {new_state} (based on this query).'.format(
                 alert_name=alert.name, new_state=new_state.upper(),
                 alert_url=alert_url, query_url=query_url)
diff --git a/redash/destinations/pagerduty.py b/redash/destinations/pagerduty.py
index ecb62adb85..3ed49ad017 100644
--- a/redash/destinations/pagerduty.py
+++ b/redash/destinations/pagerduty.py
@@ -12,7 +12,7 @@ class PagerDuty(BaseDestination):
 
     KEY_STRING = '{alert_id}_{query_id}'
-    DESCRIPTION_STR = u'Alert: {alert_name}'
+    DESCRIPTION_STR = 'Alert: {alert_name}'
 
     @classmethod
     def enabled(cls):
diff --git a/redash/extensions.py b/redash/extensions.py
index d9b70e8bb1..8d7481ebb2 100644
--- a/redash/extensions.py
+++ b/redash/extensions.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import logging
 from collections import OrderedDict as odict
diff --git a/redash/handlers/data_sources.py b/redash/handlers/data_sources.py
index 49bbace121..7ab2ffe43c 100644
--- a/redash/handlers/data_sources.py
+++ b/redash/handlers/data_sources.py
@@ -19,7 +19,7 @@ class DataSourceTypeListResource(BaseResource):
     @require_admin
     def get(self):
-        available_query_runners = filter(lambda q: not q.deprecated, query_runners.values())
+        available_query_runners = [q for q in query_runners.values() if not q.deprecated]
         return [q.to_dict() for q in sorted(available_query_runners, key=lambda q: q.name())]
 
@@ -56,7 +56,7 @@ def post(self, data_source_id):
         try:
             models.db.session.commit()
         except IntegrityError as e:
-            if req['name'] in e.message:
+            if req['name'] in str(e):
                 abort(400, message="Data source with the name {} already exists.".format(req['name']))
 
             abort(400)
@@ -109,7 +109,7 @@ def get(self):
                 'object_type': 'datasource',
             })
 
-        return sorted(response.values(), key=lambda d: d['name'].lower())
+        return sorted(list(response.values()), key=lambda d: d['name'].lower())
 
     @require_admin
     def post(self):
@@ -132,7 +132,7 @@ def post(self):
             models.db.session.commit()
         except IntegrityError as e:
-            if req['name'] in e.message:
+            if req['name'] in str(e):
                 abort(400, message="Data source with the name {} already exists.".format(req['name']))
 
             abort(400)
diff --git a/redash/handlers/destinations.py b/redash/handlers/destinations.py
index dee068873d..261da2bb39 100644
--- a/redash/handlers/destinations.py
+++ b/redash/handlers/destinations.py
@@ -13,7 +13,7 @@ class DestinationTypeListResource(BaseResource):
     @require_admin
     def get(self):
-        available_destinations = filter(lambda q: not q.deprecated, destinations.values())
+        available_destinations = [q for q in destinations.values() if not q.deprecated]
         return [q.to_dict() for q in available_destinations]
 
@@ -48,8 +48,8 @@ def post(self, destination_id):
         except ValidationError:
             abort(400)
         except IntegrityError as e:
-            if 'name' in e.message:
-                abort(400, message=u"Alert Destination with the name {} already exists.".format(req['name']))
+            if 'name' in str(e):
+                abort(400, message="Alert Destination with the name {} already exists.".format(req['name']))
             abort(500)
 
         return destination.to_dict(all=True)
@@ -87,7 +87,7 @@ def get(self):
                 'object_type': 'destination',
             })
 
-        return response.values()
+        return list(response.values())
 
     @require_admin
     def post(self):
@@ -112,8 +112,8 @@ def post(self):
             models.db.session.add(destination)
             models.db.session.commit()
         except IntegrityError as e:
-            if 'name' in e.message:
-                abort(400, message=u"Alert Destination with the name {} already exists.".format(req['name']))
+            if 'name' in str(e):
+                abort(400, message="Alert Destination with the name {} already exists.".format(req['name']))
             abort(500)
 
         return destination.to_dict(all=True)
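# Many hunks here swap `e.message` for `str(e)`: BaseException lost its
# `message` attribute in Python 3, while str(e) yields the message text on
# both interpreters. A self-contained sketch of the pattern (illustrative
# exception and strings, not Redash code):
try:
    raise ValueError("data source with this name already exists")
except ValueError as e:
    if "name" in str(e):   # e.message would raise AttributeError on py3
        print("duplicate name")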
diff --git a/redash/handlers/embed.py b/redash/handlers/embed.py
index 62805f3a5d..f677b29a14 100644
--- a/redash/handlers/embed.py
+++ b/redash/handlers/embed.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+
 from flask import request
diff --git a/redash/handlers/events.py b/redash/handlers/events.py
index ec172ac97c..10b09356ee 100644
--- a/redash/handlers/events.py
+++ b/redash/handlers/events.py
@@ -1,5 +1,6 @@
 from flask import request
-from geoip import geolite2
+import geolite2
+import maxminddb
 from user_agents import parse as parse_ua
 
 from redash.handlers.base import BaseResource, paginate
@@ -10,11 +11,12 @@ def get_location(ip):
     if ip is None:
         return "Unknown"
 
-    match = geolite2.lookup(ip)
-    if match is None:
-        return "Unknown"
-
-    return match.country
+    with maxminddb.open_database(geolite2.geolite2_database()) as reader:
+        try:
+            match = reader.get(ip)
+            return match['country']['names']['en']
+        except Exception:
+            return "Unknown"
 
 
 def event_details(event):
diff --git a/redash/handlers/favorites.py b/redash/handlers/favorites.py
index 1c2eff32b3..fe46ad673b 100644
--- a/redash/handlers/favorites.py
+++ b/redash/handlers/favorites.py
@@ -18,7 +18,7 @@ def post(self, query_id):
         try:
             models.db.session.commit()
         except IntegrityError as e:
-            if 'unique_favorite' in e.message:
+            if 'unique_favorite' in str(e):
                 models.db.session.rollback()
             else:
                 raise e
@@ -35,7 +35,7 @@ def delete(self, query_id):
 
         models.Favorite.query.filter(
             models.Favorite.object_id == query_id,
-            models.Favorite.object_type == u'Query',
+            models.Favorite.object_type == 'Query',
             models.Favorite.user == self.current_user,
         ).delete()
         models.db.session.commit()
@@ -56,7 +56,7 @@ def post(self, object_id):
         try:
             models.db.session.commit()
         except IntegrityError as e:
-            if 'unique_favorite' in e.message:
+            if 'unique_favorite' in str(e):
                 models.db.session.rollback()
             else:
                 raise e
diff --git a/redash/handlers/queries.py b/redash/handlers/queries.py
index 0f8652cf18..1e53633aaa 100644
--- a/redash/handlers/queries.py
+++ b/redash/handlers/queries.py
@@ -336,7 +336,7 @@ def post(self, query_id):
             query_def['query_text'] = query_def.pop('query')
 
         if 'tags' in query_def:
-            query_def['tags'] = filter(None, query_def['tags'])
+            query_def['tags'] = [tag for tag in query_def['tags'] if tag]
 
         query_def['last_modified_by'] = self.current_user
         query_def['changed_by'] = self.current_user
diff --git a/redash/handlers/query_results.py b/redash/handlers/query_results.py
index 5fdf8e7314..28d458863e 100644
--- a/redash/handlers/query_results.py
+++ b/redash/handlers/query_results.py
@@ -43,7 +43,7 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
         abort(400, message=e.message)
 
     if query.missing_params:
-        return error_response(u'Missing parameter value for: {}'.format(u", ".join(query.missing_params)))
+        return error_response('Missing parameter value for: {}'.format(", ".join(query.missing_params)))
 
     if max_age == 0:
         query_result = None
@@ -76,7 +76,7 @@ def get_download_filename(query_result, query, filetype):
         filename = to_filename(query.name) if query.name != '' else str(query.id)
     else:
         filename = str(query_result.id)
-    return u"{}_{}.{}".format(filename, retrieved_at, filetype)
+    return "{}_{}.{}".format(filename, retrieved_at, filetype)
 
 
 class QueryResultListResource(BaseResource):
@@ -288,7 +288,7 @@ def get(self, query_id=None, query_result_id=None, filetype='json'):
 
         response.headers.add_header(
             "Content-Disposition",
-            'attachment; filename="{}"'.format(filename.encode("utf-8"))
+            'attachment; filename="{}"'.format(filename)
         )
 
         return response
diff --git a/redash/handlers/settings.py b/redash/handlers/settings.py
index 77dab014cf..84e35b5668 100644
--- a/redash/handlers/settings.py
+++ b/redash/handlers/settings.py
@@ -10,7 +10,7 @@ def get_settings_with_defaults(defaults, org):
     values = org.settings.get('settings', {})
     settings = {}
 
-    for setting, default_value in defaults.iteritems():
+    for setting, default_value in defaults.items():
         current_value = values.get(setting)
         if current_value is None and default_value is None:
             continue
@@ -42,7 +42,7 @@ def post(self):
             self.current_org.settings['settings'] = {}
 
         previous_values = {}
 
-        for k, v in new_values.iteritems():
+        for k, v in new_values.items():
             if k == 'auth_google_apps_domains':
                 previous_values[k] = self.current_org.google_apps_domains
                 self.current_org.settings[Organization.SETTING_GOOGLE_APPS_DOMAINS] = v
diff --git a/redash/handlers/users.py b/redash/handlers/users.py
index 0eca25bd01..68bdc2d188 100644
--- a/redash/handlers/users.py
+++ b/redash/handlers/users.py
@@ -138,7 +138,7 @@ def post(self):
             models.db.session.add(user)
             models.db.session.commit()
         except IntegrityError as e:
-            if "email" in e.message:
+            if "email" in str(e):
                 abort(400, message='Email already taken.')
             abort(500)
 
@@ -263,7 +263,7 @@ def post(self, user_id):
             if current_user.id == user.id:
                 login_user(user, remember=True)
         except IntegrityError as e:
-            if "email" in e.message:
+            if "email" in str(e):
                 message = "Email already taken."
             else:
                 message = "Error updating record"
@@ -274,7 +274,7 @@ def post(self, user_id):
             'action': 'edit',
             'object_id': user.id,
             'object_type': 'user',
-            'updated_fields': params.keys()
+            'updated_fields': list(params.keys())
         })
 
         return user.to_dict(with_api_key=is_admin_or_owner(user_id))
diff --git a/redash/metrics/celery.py b/redash/metrics/celery.py
index 64cddc963d..b7e0e2b645 100644
--- a/redash/metrics/celery.py
+++ b/redash/metrics/celery.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+
 import logging
 import socket
 
@@ -29,7 +29,7 @@ def metric_name(name, tags):
     if not settings.STATSD_USE_TAGS:
         return name
 
-    tags_string = ",".join(["{}={}".format(k, v) for k, v in tags.iteritems()])
+    tags_string = ",".join(["{}={}".format(k, v) for k, v in tags.items()])
     return "{},{}".format(name, tags_string)
| %s | %s" % (self.id, self.query_hash, self.retrieved_at) def to_dict(self): return { @@ -373,7 +374,6 @@ def should_schedule_next(previous_iteration, now, interval, time=None, day_of_we return now > next_iteration -@python_2_unicode_compatible @gfk_type @generic_repr('id', 'name', 'query_hash', 'version', 'user_id', 'org_id', 'data_source_id', 'query_hash', 'last_modified_by_id', @@ -502,7 +502,7 @@ def favorites(cls, user, base_query=None): return base_query.join(( Favorite, and_( - Favorite.object_type == u'Query', + Favorite.object_type == 'Query', Favorite.object_id == Query.id ) )).filter(Favorite.user_id == user.id) @@ -543,13 +543,9 @@ def past_scheduled_queries(cls): .filter(Query.schedule.isnot(None)) .order_by(Query.id) ) - return filter( - lambda x: - x.schedule["until"] is not None and pytz.utc.localize( - datetime.datetime.strptime(x.schedule['until'], '%Y-%m-%d') - ) <= now, - queries - ) + return [query for query in queries if query.schedule["until"] is not None and pytz.utc.localize( + datetime.datetime.strptime(query.schedule['until'], '%Y-%m-%d') + ) <= now] @classmethod def outdated_queries(cls): @@ -586,7 +582,7 @@ def outdated_queries(cls): key = "{}:{}".format(query.query_hash, query.data_source_id) outdated_queries[key] = query - return outdated_queries.values() + return list(outdated_queries.values()) @classmethod def search(cls, term, group_ids, user_id=None, include_drafts=False, @@ -600,7 +596,7 @@ def search(cls, term, group_ids, user_id=None, include_drafts=False, if multi_byte_search: # Since tsvector doesn't work well with CJK languages, use `ilike` too - pattern = u'%{}%'.format(term) + pattern = '%{}%'.format(term) return all_queries.filter( or_( cls.name.ilike(pattern), @@ -678,7 +674,7 @@ def fork(self, user): kwargs = {a: getattr(self, a) for a in forked_list} # Query.create will add default TABLE visualization, so use constructor to create bare copy of query - forked_query = Query(name=u'Copy of (#{}) {}'.format(self.id, self.name), user=user, **kwargs) + forked_query = Query(name='Copy of (#{}) {}'.format(self.id, self.name), user=user, **kwargs) for v in sorted(self.visualizations, key=lambda v: v.id): forked_v = v.copy() @@ -776,7 +772,7 @@ def are_favorites(cls, user, objects): return [] object_type = text_type(objects[0].__class__.__name__) - return map(lambda fav: fav.object_id, cls.query.filter(cls.object_id.in_(map(lambda o: o.id, objects)), cls.object_type == object_type, cls.user_id == user)) + return [fav.object_id for fav in cls.query.filter(cls.object_id.in_([o.id for o in objects]), cls.object_type == object_type, cls.user_id == user)] @generic_repr('id', 'name', 'query_id', 'user_id', 'state', 'last_triggered_at', 'rearm') @@ -904,7 +900,6 @@ def generate_slug(ctx): return slug -@python_2_unicode_compatible @gfk_type @generic_repr('id', 'name', 'slug', 'user_id', 'org_id', 'version', 'is_archived', 'is_draft') class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): @@ -930,7 +925,7 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model } def __str__(self): - return u"%s=%s" % (self.id, self.name) + return "%s=%s" % (self.id, self.name) @classmethod def all(cls, org, group_ids, user_id): @@ -958,7 +953,7 @@ def all(cls, org, group_ids, user_id): @classmethod def search(cls, org, groups_ids, user_id, search_term): # TODO: switch to FTS - return cls.all(org, groups_ids, user_id).filter(cls.name.ilike(u'%{}%'.format(search_term))) + return cls.all(org, groups_ids, 
diff --git a/redash/models/organizations.py b/redash/models/organizations.py
index 28df95b918..88799d767c 100644
--- a/redash/models/organizations.py
+++ b/redash/models/organizations.py
@@ -1,4 +1,3 @@
-from six import python_2_unicode_compatible
 from sqlalchemy.orm.attributes import flag_modified
 from sqlalchemy_utils.models import generic_repr
 
@@ -10,7 +9,6 @@
 from .users import User, Group
 
 
-@python_2_unicode_compatible
 @generic_repr('id', 'name', 'slug')
 class Organization(TimestampMixin, db.Model):
     SETTING_GOOGLE_APPS_DOMAINS = 'google_apps_domains'
@@ -26,7 +24,7 @@ class Organization(TimestampMixin, db.Model):
     __tablename__ = 'organizations'
 
     def __str__(self):
-        return u'%s (%s)' % (self.name, self.id)
+        return '%s (%s)' % (self.name, self.id)
 
     @classmethod
     def get_by_slug(cls, slug):
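# The @python_2_unicode_compatible decorator dropped throughout these model
# files existed so one text-returning __str__ could serve both interpreters;
# on Python 3 alone, __str__ already returns str, so the decorator is dead
# weight. A minimal sketch of the resulting py3-only idiom (toy class, not
# the actual Redash model):
class Organization:
    def __init__(self, org_id, name):
        self.id = org_id
        self.name = name

    def __str__(self):
        return '%s (%s)' % (self.name, self.id)

print(Organization(1, 'default'))  # -> default (1)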
diff --git a/redash/models/parameterized_query.py b/redash/models/parameterized_query.py
index 2002e4b7a2..81ddde18c6 100644
--- a/redash/models/parameterized_query.py
+++ b/redash/models/parameterized_query.py
@@ -33,19 +33,19 @@ def dropdown_values(query_id, org):
     data = _load_result(query_id, org)
     first_column = data["columns"][0]["name"]
     pluck = partial(_pluck_name_and_value, first_column)
-    return map(pluck, data["rows"])
+    return list(map(pluck, data["rows"]))
 
 
 def join_parameter_list_values(parameters, schema):
     updated_parameters = {}
-    for (key, value) in parameters.iteritems():
+    for (key, value) in parameters.items():
         if isinstance(value, list):
             definition = next((definition for definition in schema if definition["name"] == key), {})
             multi_values_options = definition.get('multiValuesOptions', {})
             separator = str(multi_values_options.get('separator', ','))
             prefix = str(multi_values_options.get('prefix', ''))
             suffix = str(multi_values_options.get('suffix', ''))
-            updated_parameters[key] = separator.join(map(lambda v: prefix + v + suffix, value))
+            updated_parameters[key] = separator.join([prefix + v + suffix for v in value])
         else:
             updated_parameters[key] = value
     return updated_parameters
@@ -71,10 +71,10 @@ def _collect_query_parameters(query):
 def _parameter_names(parameter_values):
     names = []
-    for key, value in parameter_values.iteritems():
+    for key, value in parameter_values.items():
         if isinstance(value, dict):
             for inner_key in value.keys():
-                names.append(u'{}.{}'.format(key, inner_key))
+                names.append('{}.{}'.format(key, inner_key))
         else:
             names.append(key)
 
@@ -122,7 +122,7 @@ def __init__(self, template, schema=None, org=None):
         self.parameters = {}
 
     def apply(self, parameters):
-        invalid_parameter_names = [key for (key, value) in parameters.iteritems() if not self._valid(key, value)]
+        invalid_parameter_names = [key for (key, value) in parameters.items() if not self._valid(key, value)]
         if invalid_parameter_names:
             raise InvalidParameterError(invalid_parameter_names)
         else:
@@ -170,7 +170,7 @@ def _valid(self, name, value):
 
     @property
     def is_safe(self):
-        text_parameters = filter(lambda p: p["type"] == "text", self.schema)
+        text_parameters = [param for param in self.schema if param["type"] == "text"]
         return not any(text_parameters)
 
     @property
@@ -185,8 +185,8 @@ def text(self):
 
 class InvalidParameterError(Exception):
     def __init__(self, parameters):
-        parameter_names = u", ".join(parameters)
-        message = u"The following parameter values are incompatible with their definitions: {}".format(parameter_names)
+        parameter_names = ", ".join(parameters)
+        message = "The following parameter values are incompatible with their definitions: {}".format(parameter_names)
         super(InvalidParameterError, self).__init__(message)
diff --git a/redash/models/users.py b/redash/models/users.py
index c514680707..724bfeafcf 100644
--- a/redash/models/users.py
+++ b/redash/models/users.py
@@ -8,7 +8,7 @@
 from flask import current_app as app, url_for, request_started
 from flask_login import current_user, AnonymousUserMixin, UserMixin
 from passlib.apps import custom_app_context as pwd_context
-from six import python_2_unicode_compatible, string_types, text_type
+from six import string_types, text_type
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.dialects import postgresql
 
@@ -69,14 +69,12 @@ def has_permission(self, permission):
 
     def has_permissions(self, permissions):
         has_permissions = reduce(lambda a, b: a and b,
-                                 map(lambda permission: permission in self.permissions,
-                                     permissions),
+                                 [permission in self.permissions for permission in permissions],
                                  True)
 
         return has_permissions
 
 
-@python_2_unicode_compatible
 @generic_repr('id', 'name', 'email')
 class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin):
     id = Column(db.Integer, primary_key=True)
@@ -105,7 +103,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin):
     )
 
     def __str__(self):
-        return u'%s (%s)' % (self.name, self.email)
+        return '%s (%s)' % (self.name, self.email)
 
     def __init__(self, *args, **kwargs):
         if kwargs.get('email') is not None:
@@ -165,7 +163,7 @@ def profile_image_url(self):
         if self._profile_image_url is not None:
             return self._profile_image_url
 
-        email_md5 = hashlib.md5(self.email.lower()).hexdigest()
+        email_md5 = hashlib.md5(self.email.lower().encode()).hexdigest()
         return "https://www.gravatar.com/avatar/{}?s=40&d=identicon".format(email_md5)
 
     @property
@@ -200,7 +198,7 @@ def all_disabled(cls, org):
 
     @classmethod
     def search(cls, base_query, term):
-        term = u'%{}%'.format(term)
+        term = '%{}%'.format(term)
         search_filter = or_(cls.name.ilike(term), cls.email.like(term))
 
         return base_query.filter(search_filter)
@@ -234,12 +232,11 @@ def has_access(self, obj, access_type):
 
     def get_id(self):
         identity = hashlib.md5(
-            "{},{}".format(self.email, self.password_hash)
+            "{},{}".format(self.email, self.password_hash).encode()
         ).hexdigest()
-        return u"{0}-{1}".format(self.id, identity)
+        return "{0}-{1}".format(self.id, identity)
 
 
-@python_2_unicode_compatible
 @generic_repr('id', 'name', 'type', 'org_id')
 class Group(db.Model, BelongsToOrgMixin):
     DEFAULT_PERMISSIONS = ['create_dashboard', 'create_query', 'edit_dashboard', 'edit_query',
@@ -382,7 +379,7 @@ def __init__(self, api_key, org, groups, name=None):
         self.org = org
 
     def __repr__(self):
-        return u"<{}>".format(self.name)
+        return "<{}>".format(self.name)
 
     def is_api_user(self):
         return True
diff --git a/redash/monitor.py b/redash/monitor.py
index 7c4f4e5a80..76914e4de3 100644
--- a/redash/monitor.py
+++ b/redash/monitor.py
@@ -35,8 +35,8 @@ def get_celery_queues():
 
 
 def get_queues_status():
-    return dict({queue: {'size': redis_connection.llen(queue)} for queue in get_celery_queues()}.items() +
-                {queue.name: {'size': len(queue)} for queue in Queue.all(connection=redis_connection)}.items())
+    return {**{queue: {'size': redis_connection.llen(queue)} for queue in get_celery_queues()},
+            **{queue.name: {'size': len(queue)} for queue in Queue.all(connection=redis_connection)}}
 
 
 def get_db_sizes():
diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py
index 52175a93b7..a3d08145d0 100644
--- a/redash/query_runner/__init__.py
+++ b/redash/query_runner/__init__.py
@@ -83,8 +83,8 @@ def annotate_query(self, query, metadata):
         if not self.should_annotate_query:
             return query
 
-        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
-        annotated_query = u"/* {} */ {}".format(annotation, query)
+        annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.items()])
+        annotated_query = "/* {} */ {}".format(annotation, query)
         return annotated_query
 
     def test_connection(self):
@@ -142,7 +142,7 @@ def get_schema(self, get_stats=False):
         self._get_tables(schema_dict)
         if settings.SCHEMA_RUN_TABLE_SIZE_CALCULATIONS and get_stats:
             self._get_tables_stats(schema_dict)
-        return schema_dict.values()
+        return list(schema_dict.values())
 
     def _get_tables(self, schema_dict):
         return []
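# The annotate_query hunk above swaps .iteritems() for .items() while keeping
# the same output: a SQL comment prepended to the query. A self-contained
# sketch of the annotation format it produces (sample metadata and query,
# not wired into Redash):
metadata = {'Username': 'admin', 'Query ID': 42}

annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.items()])
annotated_query = "/* {} */ {}".format(annotation, "SELECT 1")
print(annotated_query)  # /* Username: admin, Query ID: 42 */ SELECT 1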
diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py
index 82d6d7abc7..52f32d3f33 100644
--- a/redash/query_runner/athena.py
+++ b/redash/query_runner/athena.py
@@ -183,7 +183,7 @@ def __get_schema_from_glue(self):
                     schema[table_name] = {'name': table_name, 'columns': column}
                     for partition in table.get('PartitionKeys', []):
                         schema[table_name]['columns'].append(partition['Name'])
-        return schema.values()
+        return list(schema.values())
 
     def get_schema(self, get_stats=False):
         if self.configuration.get('glue', False):
@@ -207,7 +207,7 @@ def get_schema(self, get_stats=False):
                 schema[table_name] = {'name': table_name, 'columns': []}
             schema[table_name]['columns'].append(row['column_name'])
 
-        return schema.values()
+        return list(schema.values())
 
     def run_query(self, query, user):
         cursor = pyathena.connect(
diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py
index d76de9ee29..24aa5f3321 100644
--- a/redash/query_runner/axibase_tsd.py
+++ b/redash/query_runner/axibase_tsd.py
@@ -192,7 +192,7 @@ def get_schema(self, get_stats=False):
         for table_name in metrics_list:
             schema[table_name] = {'name': "'{}'".format(table_name),
                                   'columns': default_columns}
-        values = schema.values()
+        values = list(schema.values())
         return values
diff --git a/redash/query_runner/azure_kusto.py b/redash/query_runner/azure_kusto.py
index d045f54d71..fa8c4a85f3 100644
--- a/redash/query_runner/azure_kusto.py
+++ b/redash/query_runner/azure_kusto.py
@@ -150,7 +150,7 @@ def get_schema(self, get_stats=False):
             for column in table['OrderedColumns']:
                 schema[table_name]['columns'].append(column['Name'])
 
-        return schema.values()
+        return list(schema.values())
 
 
 register(AzureKusto)
diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index 782a600409..03defe1fcd 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -188,8 +188,7 @@ def _get_job_data(self, query):
 
         if self.configuration.get('userDefinedFunctionResourceUri'):
             resource_uris = self.configuration["userDefinedFunctionResourceUri"].split(',')
-            job_data["configuration"]["query"]["userDefinedFunctionResources"] = map(
-                lambda resource_uri: {"resourceUri": resource_uri}, resource_uris)
+            job_data["configuration"]["query"]["userDefinedFunctionResources"] = [{"resourceUri": resource_uri} for resource_uri in resource_uris]
 
         if "maximumBillingTier" in self.configuration:
             job_data["configuration"]["query"]["maximumBillingTier"] = self.configuration["maximumBillingTier"]
@@ -253,7 +252,7 @@ def _get_columns_schema_column(self, column):
         columns = []
         if column['type'] == 'RECORD':
             for field in column['fields']:
-                columns.append(u"{}.{}".format(column['name'], field['name']))
+                columns.append("{}.{}".format(column['name'], field['name']))
         else:
             columns.append(column['name'])
diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py
index 0f0c72ff66..e56aad703a 100644
--- a/redash/query_runner/cass.py
+++ b/redash/query_runner/cass.py
@@ -102,7 +102,7 @@ def get_schema(self, get_stats=False):
                 schema[table_name] = {'name': table_name, 'columns': []}
             schema[table_name]['columns'].append(column_name)
 
-        return schema.values()
+        return list(schema.values())
 
     def run_query(self, query, user):
         connection = None
@@ -126,7 +126,7 @@ def run_query(self, query, user):
 
             column_names = result.column_names
 
-            columns = self.fetch_columns(map(lambda c: (c, 'string'), column_names))
+            columns = self.fetch_columns([(c, 'string') for c in column_names])
 
             rows = [dict(zip(column_names, row)) for row in result]
diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py
index 1fd879a7ca..1217a9ec18 100644
--- a/redash/query_runner/clickhouse.py
+++ b/redash/query_runner/clickhouse.py
@@ -65,7 +65,7 @@ def _get_tables(self, schema):
 
             schema[table_name]['columns'].append(row['name'])
 
-        return schema.values()
+        return list(schema.values())
 
     def _send_query(self, data, stream=False):
         url = self.configuration.get('url', "http://127.0.0.1:8123")
@@ -137,7 +137,7 @@ def _clickhouse_query(self, query):
         if 'totals' in result:
             totals = result['totals']
-            for column, value in columns_totals.iteritems():
+            for column, value in columns_totals.items():
                 totals[column] = value
             rows.append(totals)
@@ -156,7 +156,7 @@ def run_query(self, query, user):
         except Exception as e:
             data = None
             logging.exception(e)
-            error = unicode(e)
+            error = str(e)
 
         return data, error
diff --git a/redash/query_runner/couchbase.py b/redash/query_runner/couchbase.py
index 093cd387a6..1753bc0dd5 100644
--- a/redash/query_runner/couchbase.py
+++ b/redash/query_runner/couchbase.py
@@ -6,7 +6,6 @@
 
 from redash.query_runner import *
 from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
-from redash.utils.compat import long
 import json
 
 logger = logging.getLogger(__name__)
@@ -21,7 +20,6 @@
     str: TYPE_STRING,
     text_type: TYPE_STRING,
     int: TYPE_INTEGER,
-    long: TYPE_INTEGER,
     float: TYPE_FLOAT,
     bool: TYPE_BOOLEAN,
     datetime.datetime: TYPE_DATETIME,
@@ -45,7 +43,7 @@ def parse_results(results):
             for key in row:
                 if isinstance(row[key], dict):
                     for inner_key in row[key]:
-                        column_name = u'{}.{}'.format(key, inner_key)
+                        column_name = '{}.{}'.format(key, inner_key)
                         if _get_column_by_name(columns, column_name) is None:
                             columns.append({
                                 "name": column_name,
@@ -122,7 +120,7 @@ def get_buckets(self, query, name_param):
             table_name = row.get(name_param)
             schema[table_name] = {'name': table_name, 'columns': defaultColumns}
 
-        return schema.values()
+        return list(schema.values())
 
     def get_schema(self, get_stats=False):
diff --git a/redash/query_runner/databricks.py b/redash/query_runner/databricks.py
index 28b13593c5..04deb22e35 100644
--- a/redash/query_runner/databricks.py
+++ b/redash/query_runner/databricks.py
@@ -70,15 +70,15 @@ def _get_tables(self, schema):
 
         schemas = self._run_query_internal(schemas_query)
 
-        for schema_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['databaseName']), schemas)):
-            for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['tableName']), self._run_query_internal(tables_query % schema_name))):
-                columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['col_name']), self._run_query_internal(columns_query % (schema_name, table_name))))
+        for schema_name in [a for a in [str(a['databaseName']) for a in schemas] if len(a) > 0]:
+            for table_name in [a for a in [str(a['tableName']) for a in self._run_query_internal(tables_query % schema_name)] if len(a) > 0]:
+                columns = [a for a in [str(a['col_name']) for a in self._run_query_internal(columns_query % (schema_name, table_name))] if len(a) > 0]
 
                 if schema_name != 'default':
                     table_name = '{}.{}'.format(schema_name, table_name)
 
                 schema[table_name] = {'name': table_name, 'columns': columns}
 
-        return schema.values()
+        return list(schema.values())
 
 
 register(Databricks)
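# The databricks _get_tables rewrite above is the general py3 cleanup applied
# to several query runners: nested filter(lambda ...)/map(lambda ...) chains
# become comprehensions, which also avoids py3's lazy filter/map objects
# leaking out of the function. A small equivalent example with sample rows:
rows = [{'tableName': 'users'}, {'tableName': ''}, {'tableName': 'events'}]

# py2 style: filter(lambda a: len(a) > 0, map(lambda a: str(a['tableName']), rows))
table_names = [name for name in (str(row['tableName']) for row in rows) if len(name) > 0]
print(table_names)  # ['users', 'events']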
[dict(zip((c['name'] for c in columns), row)) for row in cursor] + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None diff --git a/redash/query_runner/dgraph.py b/redash/query_runner/dgraph.py index d5342e163e..3bf68c82d2 100644 --- a/redash/query_runner/dgraph.py +++ b/redash/query_runner/dgraph.py @@ -15,13 +15,13 @@ def reduce_item(reduced_item, key, value): # Reduction Condition 1 if type(value) is list: for i, sub_item in enumerate(value): - reduce_item(reduced_item, u'{}.{}'.format(key, i), sub_item) + reduce_item(reduced_item, '{}.{}'.format(key, i), sub_item) # Reduction Condition 2 elif type(value) is dict: sub_keys = value.keys() for sub_key in sub_keys: - reduce_item(reduced_item, u'{}.{}'.format(key, sub_key), value[sub_key]) + reduce_item(reduced_item, '{}.{}'.format(key, sub_key), value[sub_key]) # Base Condition else: @@ -93,7 +93,7 @@ def run_query(self, query, user): try: data = self.run_dgraph_query_raw(query) - first_key = next(iter(data.keys())) + first_key = next(iter(list(data.keys()))) first_node = data[first_key] data_to_be_processed = first_node @@ -138,7 +138,7 @@ def get_schema(self, get_stats=False): if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} - return schema.values() + return list(schema.values()) register(Dgraph) diff --git a/redash/query_runner/drill.py b/redash/query_runner/drill.py index 5c5ce12db8..4c19fdfbc4 100644 --- a/redash/query_runner/drill.py +++ b/redash/query_runner/drill.py @@ -55,7 +55,7 @@ def parse_response(data): types[col['name']] = col['type'] for row in rows: - for key, value in row.iteritems(): + for key, value in row.items(): row[key] = convert_type(value, types[key]) return {'columns': columns, 'rows': rows} @@ -120,7 +120,7 @@ def get_schema(self, get_stats=False): """ allowed_schemas = self.configuration.get('allowed_schemas') if allowed_schemas: - query += "and TABLE_SCHEMA in ({})".format(', '.join(map(lambda x: "'{}'".format(re.sub('[^a-zA-Z0-9_.`]', '', x)), allowed_schemas.split(',')))) + query += "and TABLE_SCHEMA in ({})".format(', '.join(["'{}'".format(re.sub('[^a-zA-Z0-9_.`]', '', allowed_schema)) for allowed_schema in allowed_schemas.split(',')])) results, error = self.run_query(query, None) @@ -132,14 +132,14 @@ def get_schema(self, get_stats=False): schema = {} for row in results['rows']: - table_name = u'{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME']) + table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME']) if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} schema[table_name]['columns'].append(row['COLUMN_NAME']) - return schema.values() + return list(schema.values()) register(Drill) diff --git a/redash/query_runner/druid.py b/redash/query_runner/druid.py index 6d629b4998..17e55f4a67 100644 --- a/redash/query_runner/druid.py +++ b/redash/query_runner/druid.py @@ -55,7 +55,7 @@ def run_query(self, query, user): try: cursor.execute(query) columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None @@ -91,7 +91,7 @@ def get_schema(self, get_stats=False): schema[table_name]['columns'].append(row['COLUMN_NAME']) - return schema.values() + return list(schema.values()) register(Druid) diff --git 
diff --git a/redash/query_runner/dynamodb_sql.py b/redash/query_runner/dynamodb_sql.py
index 12be5a7f65..f5fc7f0e3d 100644
--- a/redash/query_runner/dynamodb_sql.py
+++ b/redash/query_runner/dynamodb_sql.py
@@ -91,7 +91,7 @@ def _get_tables(self, schema):
             try:
                 table = engine.describe(table_name, True)
                 schema[table.name] = {'name': table.name,
-                                      'columns': table.attrs.keys()}
+                                      'columns': list(table.attrs.keys())}
             except DynamoDBError:
                 pass
 
@@ -110,7 +110,7 @@ def run_query(self, query, user):
 
             # When running a count query it returns the value as a string, in which case
            # we transform it into a dictionary to be the same as regular queries.
-            if isinstance(result, basestring):
+            if isinstance(result, str):
                 # when count < scanned_count, dql returns a string with number of rows scanned
                 value = result.split(" (")[0]
                 if value:
@@ -119,7 +119,7 @@ def run_query(self, query, user):
 
             for item in result:
                 if not columns:
-                    for k, v in item.iteritems():
+                    for k, v in item.items():
                         columns.append({
                             'name': k,
                             'friendly_name': k,
@@ -131,7 +131,7 @@ def run_query(self, query, user):
             json_data = json_dumps(data)
             error = None
         except ParseException as e:
-            error = u"Error parsing query at line {} (column {}):\n{}".format(
+            error = "Error parsing query at line {} (column {}):\n{}".format(
                 e.lineno, e.column, e.line)
             json_data = None
         except (SyntaxError, RuntimeError) as e:
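# dynamodb_sql.py above swaps `basestring` for `str`: Python 3 unified text
# into a single str type, so the py2 basestring/str/unicode hierarchy is
# gone. A tiny sketch of the py3-safe check, using a made-up result string
# in the shape the comment above describes (illustrative, not dql output):
def normalize(result):
    if isinstance(result, str):      # py2 code used basestring here
        return {"count": int(result.split(" (")[0])}
    return result

print(normalize("3 (scanned 10)"))   # {'count': 3}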
parent_key, data, row, result_columns, for field in result_fields: add_column_if_needed(mappings, field, field, result_columns, result_columns_index) - for key, data in raw_result["aggregations"].iteritems(): + for key, data in raw_result["aggregations"].items(): collect_aggregations(mappings, result_rows, key, data, None, result_columns, result_columns_index) logger.debug("result_rows %s", str(result_rows)) @@ -334,9 +334,9 @@ def run_query(self, query, user): return None, error if sort: - url += "&sort={0}".format(urllib.quote_plus(sort)) + url += "&sort={0}".format(urllib.parse.quote_plus(sort)) - url += "&q={0}".format(urllib.quote_plus(query_data)) + url += "&q={0}".format(urllib.parse.quote_plus(query_data)) logger.debug("Using URL: {0}".format(url)) logger.debug("Using Query: {0}".format(query_data)) diff --git a/redash/query_runner/google_analytics.py b/redash/query_runner/google_analytics.py index 479403d6de..ed519e6c1c 100644 --- a/redash/query_runner/google_analytics.py +++ b/redash/query_runner/google_analytics.py @@ -1,9 +1,7 @@ -# -*- coding: utf-8 -*- - import logging from base64 import b64decode from datetime import datetime -from urlparse import parse_qs, urlparse +from urllib.parse import parse_qs, urlparse from redash.query_runner import * from redash.utils import json_dumps, json_loads @@ -47,7 +45,7 @@ def parse_ga_response(response): d = {} for c, value in enumerate(r): column_name = response['columnHeaders'][c]['name'] - column_type = filter(lambda col: col['name'] == column_name, columns)[0]['type'] + column_type = [col for col in columns if col['name'] == column_name][0]['type'] # mcf results come a bit different than ga results: if isinstance(value, dict): @@ -128,10 +126,10 @@ def _get_tables(self, schema): for property_ in properties: if 'defaultProfileId' in property_ and 'name' in property_: schema[account['name']]['columns'].append( - u'{0} (ga:{1})'.format(property_['name'], property_['defaultProfileId']) + '{0} (ga:{1})'.format(property_['name'], property_['defaultProfileId']) ) - return schema.values() + return list(schema.values()) def test_connection(self): try: diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py index 5c369d3534..7584153665 100644 --- a/redash/query_runner/google_spreadsheets.py +++ b/redash/query_runner/google_spreadsheets.py @@ -35,7 +35,7 @@ def _get_columns_and_column_names(row): column_name = 'column_{}'.format(xl_col_to_name(i)) if column_name in column_names: - column_name = u"{}{}".format(column_name, duplicate_counter) + column_name = "{}{}".format(column_name, duplicate_counter) duplicate_counter += 1 column_names.append(column_name) @@ -56,7 +56,7 @@ def _value_eval_list(row_values, col_types): if rval is None or rval == '': val = None elif typ == TYPE_BOOLEAN: - val = True if unicode(rval).lower() == 'true' else False + val = True if str(rval).lower() == 'true' else False elif typ == TYPE_DATETIME: val = parser.parse(rval) elif typ == TYPE_FLOAT: @@ -65,7 +65,7 @@ def _value_eval_list(row_values, col_types): val = int(rval) else: # for TYPE_STRING and default - val = unicode(rval) + val = str(rval) value_list.append(val) except (ValueError, OverflowError): value_list.append(rval) diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py index 6b10e23ebc..59555f6589 100644 --- a/redash/query_runner/hive_ds.py +++ b/redash/query_runner/hive_ds.py @@ -77,15 +77,15 @@ def _get_tables(self, schema): columns_query = "show columns in %s.%s" - for schema_name in 
filter(lambda a: len(a) > 0, map(lambda a: str(a['database_name']), self._run_query_internal(schemas_query))): - for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['tab_name']), self._run_query_internal(tables_query % schema_name))): - columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['field']), self._run_query_internal(columns_query % (schema_name, table_name)))) + for schema_name in [a for a in [str(a['database_name']) for a in self._run_query_internal(schemas_query)] if len(a) > 0]: + for table_name in [a for a in [str(a['tab_name']) for a in self._run_query_internal(tables_query % schema_name)] if len(a) > 0]: + columns = [a for a in [str(a['field']) for a in self._run_query_internal(columns_query % (schema_name, table_name))] if len(a) > 0] if schema_name != 'default': table_name = '{}.{}'.format(schema_name, table_name) schema[table_name] = {'name': table_name, 'columns': columns} - return schema.values() + return list(schema.values()) def _get_connection(self): host = self.configuration['host'] diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py index 47f62f000d..7f64164f73 100644 --- a/redash/query_runner/impala_ds.py +++ b/redash/query_runner/impala_ds.py @@ -83,16 +83,16 @@ def _get_tables(self, schema_dict): tables_query = "show tables in %s;" columns_query = "show column stats %s.%s;" - for schema_name in map(lambda a: unicode(a['name']), self._run_query_internal(schemas_query)): - for table_name in map(lambda a: unicode(a['name']), self._run_query_internal(tables_query % schema_name)): - columns = map(lambda a: unicode(a['Column']), self._run_query_internal(columns_query % (schema_name, table_name))) + for schema_name in [str(a['name']) for a in self._run_query_internal(schemas_query)]: + for table_name in [str(a['name']) for a in self._run_query_internal(tables_query % schema_name)]: + columns = [str(a['Column']) for a in self._run_query_internal(columns_query % (schema_name, table_name))] if schema_name != 'default': table_name = '{}.{}'.format(schema_name, table_name) schema_dict[table_name] = {'name': table_name, 'columns': columns} - return schema_dict.values() + return list(schema_dict.values()) def run_query(self, query, user): diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index 47a47b2fe6..7b10387353 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -22,7 +22,7 @@ def add_column(self, column, column_type=TYPE_STRING): self.columns[column] = {'name': column, 'type': column_type, 'friendly_name': column} def to_json(self): - return json_dumps({'rows': self.rows, 'columns': self.columns.values()}) + return json_dumps({'rows': self.rows, 'columns': list(self.columns.values())}) def merge(self, set): self.rows = self.rows + set.rows @@ -32,7 +32,7 @@ def parse_issue(issue, field_mapping): result = OrderedDict() result['key'] = issue['key'] - for k, v in issue['fields'].iteritems():# + for k, v in issue['fields'].items():# output_name = field_mapping.get_output_field_name(k) member_names = field_mapping.get_dict_members(k) @@ -102,7 +102,7 @@ class FieldMapping: def __init__(cls, query_field_mapping): cls.mapping = [] - for k, v in query_field_mapping.iteritems(): + for k, v in query_field_mapping.items(): field_name = k member_name = None diff --git a/redash/query_runner/json_ds.py b/redash/query_runner/json_ds.py index 9cf3226ed8..7467a52288 100644 --- a/redash/query_runner/json_ds.py +++ b/redash/query_runner/json_ds.py @@ -3,11 +3,10 @@ import socket import ipaddress 
import datetime -from urlparse import urlparse +from urllib.parse import urlparse from funcy import compact, project from six import text_type from redash.utils import json_dumps -from redash.utils.compat import long from redash.query_runner import (BaseHTTPQueryRunner, register, TYPE_BOOLEAN, TYPE_DATETIME, TYPE_FLOAT, TYPE_INTEGER, TYPE_STRING) @@ -41,7 +40,6 @@ def is_private_address(url): str: TYPE_STRING, text_type: TYPE_STRING, int: TYPE_INTEGER, - long: TYPE_INTEGER, float: TYPE_FLOAT, bool: TYPE_BOOLEAN, datetime.datetime: TYPE_DATETIME, @@ -116,7 +114,7 @@ def parse_json(data, path, fields): for key in row: if isinstance(row[key], dict): for inner_key in row[key]: - column_name = u'{}.{}'.format(key, inner_key) + column_name = '{}.{}'.format(key, inner_key) if fields and key not in fields and column_name not in fields: continue diff --git a/redash/query_runner/kylin.py b/redash/query_runner/kylin.py index 261fa3f5e0..c08954d3a1 100644 --- a/redash/query_runner/kylin.py +++ b/redash/query_runner/kylin.py @@ -132,7 +132,7 @@ def get_columns(self, colmetas): def get_rows(self, columns, results): return [ - dict(zip((c['name'] for c in columns), row)) + dict(zip((column['name'] for column in columns), row)) for row in results ] diff --git a/redash/query_runner/mapd.py b/redash/query_runner/mapd.py index d116efa456..d4e6eaef0d 100644 --- a/redash/query_runner/mapd.py +++ b/redash/query_runner/mapd.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import + try: import pymapd @@ -82,7 +82,7 @@ def run_query(self, query, user): try: cursor.execute(query) columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None json_data = json_dumps(data) @@ -102,7 +102,7 @@ def _get_tables(self, schema): finally: connection.close - return schema.values() + return list(schema.values()) def test_connection(self): connection = self.connect_database() diff --git a/redash/query_runner/memsql_ds.py b/redash/query_runner/memsql_ds.py index 917e4962cb..4525920e03 100644 --- a/redash/query_runner/memsql_ds.py +++ b/redash/query_runner/memsql_ds.py @@ -78,17 +78,14 @@ def _get_tables(self, schema): columns_query = "show columns in %s" - for schema_name in filter(lambda a: len(a) > 0, - map(lambda a: str(a['Database']), self._run_query_internal(schemas_query))): - for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['Tables_in_%s' % schema_name]), - self._run_query_internal( - tables_query % schema_name))): + for schema_name in [a for a in [str(a['Database']) for a in self._run_query_internal(schemas_query)] if len(a) > 0]: + for table_name in [a for a in [str(a['Tables_in_%s' % schema_name]) for a in self._run_query_internal( + tables_query % schema_name)] if len(a) > 0]: table_name = '.'.join((schema_name, table_name)) - columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['Field']), - self._run_query_internal(columns_query % table_name))) + columns = [a for a in [str(a['Field']) for a in self._run_query_internal(columns_query % table_name)] if len(a) > 0] schema[table_name] = {'name': table_name, 'columns': columns} - return schema.values() + return list(schema.values()) def run_query(self, query, user): @@ -110,7 +107,7 @@ def run_query(self, query, user): # 'type': types_map.get(column[COLUMN_TYPE], None) # }) - rows = 
[dict(zip(list(row.keys()), list(row.values()))) for row in res] + rows = [dict(zip(row.keys(), row.values())) for row in res] # ==================================================================================================== # temporary - until https://github.com/memsql/memsql-python/pull/8 gets merged diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py index b6dad02747..558942197d 100644 --- a/redash/query_runner/mongodb.py +++ b/redash/query_runner/mongodb.py @@ -7,7 +7,6 @@ from redash.query_runner import * from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time -from redash.utils.compat import long logger = logging.getLogger(__name__) @@ -28,7 +27,6 @@ str: TYPE_STRING, text_type: TYPE_STRING, int: TYPE_INTEGER, - long: TYPE_INTEGER, float: TYPE_FLOAT, bool: TYPE_BOOLEAN, datetime.datetime: TYPE_DATETIME, @@ -57,7 +55,7 @@ def parse_oids(oids): def datetime_parser(dct): - for k, v in dct.iteritems(): + for k, v in dct.items(): if isinstance(v, string_types): m = date_regex.findall(v) if len(m) > 0: @@ -95,7 +93,7 @@ def parse_results(results): for key in row: if isinstance(row[key], dict): for inner_key in row[key]: - column_name = u'{}.{}'.format(key, inner_key) + column_name = '{}.{}'.format(key, inner_key) if _get_column_by_name(columns, column_name) is None: columns.append({ "name": column_name, @@ -218,7 +216,7 @@ def get_schema(self, get_stats=False): schema[collection_name] = { "name": collection_name, "columns": sorted(columns)} - return schema.values() + return list(schema.values()) def run_query(self, query, user): db = self._get_db() diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py index 4349acebf3..541c736747 100644 --- a/redash/query_runner/mssql.py +++ b/redash/query_runner/mssql.py @@ -98,7 +98,7 @@ def _get_tables(self, schema): for row in results['rows']: if row['table_schema'] != self.configuration['db']: - table_name = u'{}.{}'.format(row['table_schema'], row['table_name']) + table_name = '{}.{}'.format(row['table_schema'], row['table_name']) else: table_name = row['table_name'] @@ -107,7 +107,7 @@ def _get_tables(self, schema): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = None @@ -126,7 +126,7 @@ def run_query(self, query, user): connection = pymssql.connect(server=server, user=user, password=password, database=db, tds_version=tds_version, charset=charset) - if isinstance(query, unicode): + if isinstance(query, str): query = query.encode(charset) cursor = connection.cursor() @@ -137,7 +137,7 @@ def run_query(self, query, user): if cursor.description is not None: columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in data] + rows = [dict(zip((column['name'] for column in columns), row)) for row in data] data = {'columns': columns, 'rows': rows} json_data = json_dumps(data) diff --git a/redash/query_runner/mssql_odbc.py b/redash/query_runner/mssql_odbc.py index 7736c56fba..48c18cb930 100644 --- a/redash/query_runner/mssql_odbc.py +++ b/redash/query_runner/mssql_odbc.py @@ -88,7 +88,7 @@ def _get_tables(self, schema): for row in results['rows']: if row['table_schema'] != self.configuration['db']: - table_name = u'{}.{}'.format(row['table_schema'], row['table_name']) + table_name = '{}.{}'.format(row['table_schema'], row['table_name']) else: table_name = row['table_name'] @@ 
-97,7 +97,7 @@ def _get_tables(self, schema): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = None @@ -126,7 +126,7 @@ def run_query(self, query, user): if cursor.description is not None: columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in data] + rows = [dict(zip((column['name'] for column in columns), row)) for row in data] data = {'columns': columns, 'rows': rows} json_data = json_dumps(data) diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index 907fb15b54..a6f5d5c480 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -143,7 +143,7 @@ def _get_tables(self, schema): for row in results['rows']: if row['table_schema'] != self.configuration['db']: - table_name = u'{}.{}'.format(row['table_schema'], + table_name = '{}.{}'.format(row['table_schema'], row['table_name']) else: table_name = row['table_name'] @@ -153,7 +153,7 @@ def _get_tables(self, schema): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): ev = threading.Event() @@ -197,7 +197,7 @@ def _run_query(self, query, user, connection, r, ev): columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in desc]) rows = [ - dict(zip((c['name'] for c in columns), row)) + dict(zip((column['name'] for column in columns), row)) for row in data ] @@ -226,7 +226,11 @@ def _get_ssl_parameters(self): ssl_params = {} if self.configuration.get('use_ssl'): - config_map = dict(ssl_cacert='ca', ssl_cert='cert', ssl_key='key') + config_map = { + "ssl_cacert": "ca", + "ssl_cert": "cert", + "ssl_key": "key", + } for key, cfg in config_map.items(): val = self.configuration.get(key) if val: diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index 10795dcbee..11a382c35b 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -111,7 +111,7 @@ def _get_tables(self, schema): schema[table_name]['columns'].append(row['COLUMN_NAME']) - return schema.values() + return list(schema.values()) @classmethod def _convert_number(cls, value): @@ -126,7 +126,7 @@ def output_handler(cls, cursor, name, default_type, length, precision, scale): return cursor.var(cx_Oracle.LONG_STRING, 80000, cursor.arraysize) if default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR): - return cursor.var(unicode, length, cursor.arraysize) + return cursor.var(str, length, cursor.arraysize) if default_type == cx_Oracle.NUMBER: if scale <= 0: @@ -143,7 +143,7 @@ def run_query(self, query, user): rows_count = cursor.rowcount if cursor.description is not None: columns = self.fetch_columns([(i[0], Oracle.get_col_type(i[1], i[5])) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None json_data = json_dumps(data) @@ -154,7 +154,7 @@ def run_query(self, query, user): json_data = json_dumps(data) connection.commit() except cx_Oracle.DatabaseError as err: - error = u"Query failed. {}.".format(err.message) + error = "Query failed. 
{}.".format(err.message) json_data = None except KeyboardInterrupt: connection.cancel() diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index f6060d431d..99787730bb 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -64,9 +64,9 @@ def _wait(conn, timeout=None): def full_table_name(schema, name): - if '.' in name: + if '.' in name: name = u'"{}"'.format(name) - + return u'{}.{}'.format(schema, name) @@ -186,7 +186,7 @@ def _get_tables(self, schema): self._get_definitions(schema, query) - return schema.values() + return list(schema.values()) def _get_connection(self): connection = psycopg2.connect( @@ -214,7 +214,7 @@ def run_query(self, query, user): columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description]) rows = [ - dict(zip((c['name'] for c in columns), row)) + dict(zip((column['name'] for column in columns), row)) for row in cursor ] @@ -230,7 +230,7 @@ def run_query(self, query, user): error = "Query interrupted. Please retry." json_data = None except psycopg2.DatabaseError as e: - error = e.message + error = str(e) json_data = None except (KeyboardInterrupt, InterruptException): connection.cancel() @@ -304,7 +304,7 @@ def configuration_schema(cls): "required": ["dbname", "user", "password", "host", "port"], "secret": ["password"] } - + def annotate_query(self, query, metadata): annotated = super(Redshift, self).annotate_query(query, metadata) @@ -312,11 +312,11 @@ def annotate_query(self, query, metadata): query_group = self.configuration.get('scheduled_query_group') else: query_group = self.configuration.get('adhoc_query_group') - + if query_group: set_query_group = 'set query_group to {};'.format(query_group) annotated = '{}\n{}'.format(set_query_group, annotated) - + return annotated def _get_tables(self, schema): @@ -349,7 +349,7 @@ def _get_tables(self, schema): self._get_definitions(schema, query) - return schema.values() + return list(schema.values()) class CockroachDB(PostgreSQL): diff --git a/redash/query_runner/phoenix.py b/redash/query_runner/phoenix.py index 1c36aaf571..22fc7c996c 100644 --- a/redash/query_runner/phoenix.py +++ b/redash/query_runner/phoenix.py @@ -86,7 +86,7 @@ def get_schema(self, get_stats=False): schema[table_name]['columns'].append(row['COLUMN_NAME']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = phoenixdb.connect( @@ -99,7 +99,7 @@ def run_query(self, query, user): cursor.execute(query) column_tuples = [(i[0], TYPES_MAPPING.get(i[1], None)) for i in cursor.description] columns = self.fetch_columns(column_tuples) - rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())] + rows = [dict(zip(([column['name'] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())] data = {'columns': columns, 'rows': rows} json_data = json_dumps(data) error = None @@ -112,7 +112,7 @@ def run_query(self, query, user): json_data = None except Exception as ex: json_data = None - error = unicode(ex) + error = str(ex) finally: if connection: connection.close() diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index 2966d1ccf9..56369903ef 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -95,7 +95,7 @@ def get_schema(self, get_stats=False): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = presto.connect( @@ -114,7 +114,7 @@ def 
diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index 2966d1ccf9..56369903ef 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -95,7 +95,7 @@ def get_schema(self, get_stats=False): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = presto.connect( @@ -114,7 +114,7 @@ def run_query(self, query, user): column_tuples = [(i[0], PRESTO_TYPES_MAPPING.get(i[1], None)) for i in cursor.description] columns = self.fetch_columns(column_tuples) - rows = [dict(zip(([c['name'] for c in columns]), r)) - for i, r in enumerate(cursor.fetchall())] + rows = [dict(zip([column['name'] for column in columns], r)) + for r in cursor.fetchall()] data = {'columns': columns, 'rows': rows} json_data = json_dumps(data) @@ -136,8 +136,6 @@ def run_query(self, query, user): except Exception as ex: json_data = None - error = ex.message - if not isinstance(error, basestring): - error = unicode(error) + error = str(ex) return json_data, error diff --git a/redash/query_runner/prometheus.py b/redash/query_runner/prometheus.py index 6279d90a69..180fa7bc7d 100644 --- a/redash/query_runner/prometheus.py +++ b/redash/query_runner/prometheus.py @@ -2,7 +2,7 @@ import time from datetime import datetime from dateutil import parser -from urlparse import parse_qs +from urllib.parse import parse_qs from redash.query_runner import BaseQueryRunner, register, TYPE_DATETIME, TYPE_STRING from redash.utils import json_dumps @@ -93,7 +93,7 @@ def get_schema(self, get_stats=False): schema = {} for name in data: schema[name] = {'name': name, 'columns': []} - return schema.values() + return list(schema.values()) def run_query(self, query, user): """ diff --git a/redash/query_runner/qubole.py b/redash/query_runner/qubole.py index 82276ca139..c5ea676d6f 100644 --- a/redash/query_runner/qubole.py +++ b/redash/query_runner/qubole.py @@ -1,8 +1,8 @@ -from __future__ import absolute_import + import time import requests import logging -from cStringIO import StringIO +from io import StringIO from redash.query_runner import BaseQueryRunner, register from redash.query_runner import TYPE_STRING @@ -106,7 +106,7 @@ def run_query(self, query, user): data = results.split('\r\n') columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split('\t')]) - rows = [dict(zip((c['name'] for c in columns), row.split('\t'))) for row in data] + rows = [dict(zip((column['name'] for column in columns), row.split('\t'))) for row in data] json_data = json_dumps({'columns': columns, 'rows': rows}) except KeyboardInterrupt: @@ -128,7 +128,7 @@ def get_schema(self, get_stats=False): for schema in data['schemas']: tables = data['schemas'][schema] for table in tables: - table_name = table.keys()[0] + table_name = list(table.keys())[0] columns = [f['name'] for f in table[table_name]['columns']] if schema != 'default': @@ -139,7 +139,7 @@ except Exception as e: logging.error("Failed to get schema information from Qubole. Error {}".format(str(e))) - return schemas.values() + return list(schemas.values()) def _get_header(self): return {"Content-type": "application/json", "Accept": "application/json", diff --git a/redash/query_runner/query_results.py b/redash/query_runner/query_results.py index 97e174e398..564a234470 100644 --- a/redash/query_runner/query_results.py +++ b/redash/query_runner/query_results.py @@ -38,7 +38,7 @@ def _load_query(user, query_id): # TODO: this duplicates some of the logic we already have in the redash.handlers.query_results. # We should merge it so it's consistent. 
if not has_access(query.data_source, user, view_only): - raise PermissionError(u"You do not have access to query id {}.".format( + raise PermissionError("You do not have access to query id {}.".format( query.id)) return query @@ -78,7 +78,7 @@ def create_tables_from_query_ids(user, def fix_column_name(name): - return u'"{}"'.format(re.sub('[:."\s]', '_', name, flags=re.UNICODE)) + return '"{}"'.format(re.sub('[:."\s]', '_', name, flags=re.UNICODE)) def flatten(value): @@ -94,15 +94,15 @@ def create_table(connection, table_name, query_results): safe_columns = [fix_column_name(column) for column in columns] column_list = ", ".join(safe_columns) - create_table = u"CREATE TABLE {table_name} ({column_list})".format( + create_table = "CREATE TABLE {table_name} ({column_list})".format( table_name=table_name, column_list=column_list) logger.debug("CREATE TABLE query: %s", create_table) connection.execute(create_table) except sqlite3.OperationalError as exc: - raise CreateTableError(u"Error creating table {}: {}".format( - table_name, exc.message)) + raise CreateTableError("Error creating table {}: {}".format( + table_name, str(exc))) - insert_template = u"insert into {table_name} ({column_list}) values ({place_holders})".format( + insert_template = "insert into {table_name} ({column_list}) values ({place_holders})".format( table_name=table_name, column_list=column_list, place_holders=','.join(['?'] * len(columns))) diff --git a/redash/query_runner/rockset.py b/redash/query_runner/rockset.py index 715fc100ae..5d3a6a332e 100644 --- a/redash/query_runner/rockset.py +++ b/redash/query_runner/rockset.py @@ -79,9 +79,9 @@ def _get_tables(self, schema): for col in self.api.list(): table_name = col['name'] describe = self.api.query('DESCRIBE "{}"'.format(table_name)) - columns = list(set(map(lambda x: x['field'][0], describe['results']))) + columns = list(set([result['field'][0] for result in describe['results']])) schema[table_name] = {'name': table_name, 'columns': columns} - return schema.values() + return list(schema.values()) def run_query(self, query, user): results = self.api.query(query) diff --git a/redash/query_runner/salesforce.py b/redash/query_runner/salesforce.py index 8cc72910ff..3a2af3ab6c 100644 --- a/redash/query_runner/salesforce.py +++ b/redash/query_runner/salesforce.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import re import logging from collections import OrderedDict @@ -51,7 +49,7 @@ class Salesforce(BaseQueryRunner): should_annotate_query = False - + @classmethod def enabled(cls): return enabled @@ -182,7 +180,7 @@ def get_schema(self, get_stats=False): desc = sf.__getattr__(sobject['name']).describe() fields = desc['fields'] schema[table_name] = {'name': table_name, 'columns': [f['name'] for f in fields]} - return schema.values() + return list(schema.values()) register(Salesforce) diff --git a/redash/query_runner/snowflake.py b/redash/query_runner/snowflake.py index 660e455e02..31f5be4c98 100644 --- a/redash/query_runner/snowflake.py +++ b/redash/query_runner/snowflake.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import + try: import snowflake.connector @@ -92,7 +92,7 @@ def run_query(self, query, user): columns = self.fetch_columns( [(i[0], self.determine_type(i[1], i[5])) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} @@ -129,7 +129,7 @@ def get_schema(self, get_stats=False): 
schema[table_name]['columns'].append(row['COLUMN_NAME']) - return schema.values() + return list(schema.values()) register(Snowflake) diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index c06cc024fe..3881e569f6 100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -1,8 +1,5 @@ import logging import sqlite3 -import sys - -from six import reraise from redash.query_runner import BaseSQLQueryRunner, register from redash.utils import json_dumps, json_loads @@ -57,7 +54,7 @@ def _get_tables(self, schema): for row_column in results_table['rows']: schema[table_name]['columns'].append(row_column['name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = sqlite3.connect(self._dbpath) @@ -69,7 +66,7 @@ def run_query(self, query, user): if cursor.description is not None: columns = self.fetch_columns([(i[0], None) for i in cursor.description]) - rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] + rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None @@ -81,12 +78,6 @@ def run_query(self, query, user): connection.cancel() error = "Query cancelled by user." json_data = None - except Exception as e: - # handle unicode error message - err_class = sys.exc_info()[1].__class__ - err_args = [arg.decode('utf-8') for arg in sys.exc_info()[1].args] - unicode_err = err_class(*err_args) - reraise(unicode_err, None, sys.exc_info()[2]) finally: connection.close() return json_data, error diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index 895becaac3..88637ec957 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -86,7 +86,7 @@ def get_schema(self, get_stats=False): } except Exception as ex: raise Exception("Failed getting schema") - return schema.values() + return list(schema.values()) def run_query(self, query, user): connection = tdclient.connect( @@ -104,7 +104,7 @@ def run_query(self, query, user): if cursor.rowcount == 0: rows = [] else: - rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())] + rows = [dict(zip(([column['name'] for column in columns]), r)) for r in cursor.fetchall()] data = {'columns': columns, 'rows': rows} json_data = json_dumps(data) error = None diff --git a/redash/query_runner/uptycs.py b/redash/query_runner/uptycs.py index c2573a26d0..3f5a37a209 100644 --- a/redash/query_runner/uptycs.py +++ b/redash/query_runner/uptycs.py @@ -129,7 +129,7 @@ def get_schema(self, get_stats=False): table_json = {"name": table_name, "columns": columns} redash_json.append(table_json) - logger.debug("%s", schema.values()) + logger.debug("%s", list(schema.values())) return redash_json diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index e28371d7f3..5c5e80e7d6 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -97,7 +97,7 @@ def _get_tables(self, schema): schema[table_name]['columns'].append(row['column_name']) - return schema.values() + return list(schema.values()) def run_query(self, query, user): import vertica_python diff --git a/redash/query_runner/yandex_metrica.py b/redash/query_runner/yandex_metrica.py index d008b8a505..3736eb1290 100644 --- a/redash/query_runner/yandex_metrica.py +++ b/redash/query_runner/yandex_metrica.py @@ -1,6 +1,6 @@ import logging import yaml -from urlparse import parse_qs, urlparse +from 
urllib.parse import parse_qs, urlparse import requests @@ -23,7 +23,7 @@ 'pageViewsInterval', 'pageViews', 'firstVisitYear', 'firstVisitMonth', 'firstVisitDayOfMonth', 'firstVisitDayOfWeek', 'firstVisitMinute', 'firstVisitDekaminute', - ) + ), } for type_, elements in COLUMN_TYPES.items(): @@ -106,7 +106,7 @@ def _get_tables(self, schema): schema[owner]['columns'].append(counter) - return schema.values() + return list(schema.values()) def test_connection(self): self._send_query('management/v1/{0}'.format(self.list_path)) @@ -133,7 +133,7 @@ def run_query(self, query, user): params = yaml.safe_load(query) except ValueError as e: logging.exception(e) - error = unicode(e) + error = str(e) return data, error if isinstance(params, dict): @@ -148,7 +148,7 @@ def run_query(self, query, user): error = None except Exception as e: logging.exception(e) - error = unicode(e) + error = str(e) return data, error diff --git a/redash/schedule.py b/redash/schedule.py index e8bf018057..0a6b75fa0a 100644 --- a/redash/schedule.py +++ b/redash/schedule.py @@ -5,13 +5,13 @@ from rq_scheduler import Scheduler -from redash import settings, redis_connection +from redash import settings, rq_redis_connection from redash.tasks import (sync_user_details, refresh_queries, empty_schedules, refresh_schemas, cleanup_query_results, version_check, send_aggregated_errors) -rq_scheduler = Scheduler(connection=redis_connection, +rq_scheduler = Scheduler(connection=rq_redis_connection, queue_name="periodic", interval=5) diff --git a/redash/serializers/query_result.py b/redash/serializers/query_result.py index 4cc8e827c0..5d43734406 100644 --- a/redash/serializers/query_result.py +++ b/redash/serializers/query_result.py @@ -1,110 +1,109 @@ -import cStringIO -import csv -import xlsxwriter -from funcy import rpartial, project -from dateutil.parser import isoparse as parse_date -from redash.utils import json_loads, UnicodeWriter -from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATE, TYPE_DATETIME) -from redash.authentication.org_resolving import current_org - - -def _convert_format(fmt): - return fmt.replace('DD', '%d').replace('MM', '%m').replace('YYYY', '%Y').replace('YY', '%y').replace('HH', '%H').replace('mm', '%M').replace('ss', '%s') - - -def _convert_bool(value): - if value is True: - return "true" - elif value is False: - return "false" - - return value - - -def _convert_datetime(value, fmt): - if not value: - return value - - try: - parsed = parse_date(value) - ret = parsed.strftime(fmt) - except Exception: - return value - - return ret - - -def _get_column_lists(columns): - date_format = _convert_format(current_org.get_setting('date_format')) - datetime_format = _convert_format('{} {}'.format(current_org.get_setting('date_format'), current_org.get_setting('time_format'))) - - special_types = { - TYPE_BOOLEAN: _convert_bool, - TYPE_DATE: rpartial(_convert_datetime, date_format), - TYPE_DATETIME: rpartial(_convert_datetime, datetime_format) - } - - fieldnames = [] - special_columns = dict() - - for col in columns: - fieldnames.append(col['name']) - - for col_type in special_types.keys(): - if col['type'] == col_type: - special_columns[col['name']] = special_types[col_type] - - return fieldnames, special_columns - - -def serialize_query_result(query_result, is_api_user): - if is_api_user: - publicly_needed_keys = ['data', 'retrieved_at'] - return project(query_result.to_dict(), publicly_needed_keys) - else: - return query_result.to_dict() - - -def serialize_query_result_to_csv(query_result): - s = cStringIO.StringIO() - - 
query_data = query_result.data - - fieldnames, special_columns = _get_column_lists(query_data['columns'] or []) - - writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=fieldnames) - writer.writer = UnicodeWriter(s) - writer.writeheader() - - for row in query_data['rows']: - for col_name, converter in special_columns.iteritems(): - if col_name in row: - row[col_name] = converter(row[col_name]) - - writer.writerow(row) - - return s.getvalue() - - -def serialize_query_result_to_xlsx(query_result): - s = cStringIO.StringIO() - - query_data = query_result.data - book = xlsxwriter.Workbook(s, {'constant_memory': True}) - sheet = book.add_worksheet("result") - - column_names = [] - for (c, col) in enumerate(query_data['columns']): - sheet.write(0, c, col['name']) - column_names.append(col['name']) - - for (r, row) in enumerate(query_data['rows']): - for (c, name) in enumerate(column_names): - v = row.get(name) - if isinstance(v, list) or isinstance(v, dict): - v = str(v).encode('utf-8') - sheet.write(r + 1, c, v) - - book.close() - - return s.getvalue() +import io +import csv +import xlsxwriter +from funcy import rpartial, project +from dateutil.parser import isoparse as parse_date +from redash.utils import json_loads +from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATE, TYPE_DATETIME) +from redash.authentication.org_resolving import current_org + + +def _convert_format(fmt): + return fmt.replace('DD', '%d').replace('MM', '%m').replace('YYYY', '%Y').replace('YY', '%y').replace('HH', '%H').replace('mm', '%M').replace('ss', '%S') + + +def _convert_bool(value): + if value is True: + return "true" + elif value is False: + return "false" + + return value + + +def _convert_datetime(value, fmt): + if not value: + return value + + try: + parsed = parse_date(value) + ret = parsed.strftime(fmt) + except Exception: + return value + + return ret + + +def _get_column_lists(columns): + date_format = _convert_format(current_org.get_setting('date_format')) + datetime_format = _convert_format('{} {}'.format(current_org.get_setting('date_format'), current_org.get_setting('time_format'))) + + special_types = { + TYPE_BOOLEAN: _convert_bool, + TYPE_DATE: rpartial(_convert_datetime, date_format), + TYPE_DATETIME: rpartial(_convert_datetime, datetime_format), + } + + fieldnames = [] + special_columns = dict() + + for col in columns: + fieldnames.append(col['name']) + + for col_type in special_types.keys(): + if col['type'] == col_type: + special_columns[col['name']] = special_types[col_type] + + return fieldnames, special_columns + + +def serialize_query_result(query_result, is_api_user): + if is_api_user: + publicly_needed_keys = ['data', 'retrieved_at'] + return project(query_result.to_dict(), publicly_needed_keys) + else: + return query_result.to_dict() + + +def serialize_query_result_to_csv(query_result): + s = io.StringIO() + + query_data = query_result.data + + fieldnames, special_columns = _get_column_lists(query_data['columns'] or []) + + writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=fieldnames) + writer.writeheader() + + for row in query_data['rows']: + for col_name, converter in special_columns.items(): + if col_name in row: + row[col_name] = converter(row[col_name]) + + writer.writerow(row) + + return s.getvalue() + + +def serialize_query_result_to_xlsx(query_result): + output = io.BytesIO() + + query_data = query_result.data + book = xlsxwriter.Workbook(output, {'constant_memory': True}) + sheet = book.add_worksheet("result") + + column_names = [] + for c, col in 
enumerate(query_data['columns']): + sheet.write(0, c, col['name']) + column_names.append(col['name']) + + for r, row in enumerate(query_data['rows']): + for c, name in enumerate(column_names): + v = row.get(name) + if isinstance(v, (dict, list)): + v = str(v) + sheet.write(r + 1, c, v) + + book.close() + + return output.getvalue() diff --git a/redash/settings/__init__.py b/redash/settings/__init__.py index 53c0a02543..a5de3b64f6 100644 --- a/redash/settings/__init__.py +++ b/redash/settings/__init__.py @@ -4,10 +4,13 @@ from funcy import distinct, remove from flask_talisman import talisman -from .helpers import fix_assets_path, array_from_string, parse_boolean, int_or_none, set_from_string +from .helpers import fix_assets_path, array_from_string, parse_boolean, int_or_none, set_from_string, add_decode_responses_to_redis_url from .organization import DATE_FORMAT, TIME_FORMAT # noqa -REDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', "redis://localhost:6379/0")) +# _REDIS_URL is the unchanged REDIS_URL we get from env vars, to be used later with Celery +_REDIS_URL = os.environ.get('REDASH_REDIS_URL', os.environ.get('REDIS_URL', "redis://localhost:6379/0")) +# This is the one to use for Redash' own connection: +REDIS_URL = add_decode_responses_to_redis_url(_REDIS_URL) PROXIES_COUNT = int(os.environ.get('REDASH_PROXIES_COUNT', "1")) STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1") @@ -23,8 +26,10 @@ SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_ECHO = False +RQ_REDIS_URL = os.environ.get("RQ_REDIS_URL", _REDIS_URL) + # Celery related settings -CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL) +CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", _REDIS_URL) CELERY_RESULT_BACKEND = os.environ.get( "REDASH_CELERY_RESULT_BACKEND", os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER)) @@ -331,10 +336,10 @@ def email_server_is_configured(): # Client side toggles: ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false")) -DASHBOARD_REFRESH_INTERVALS = map(int, array_from_string(os.environ.get("REDASH_DASHBOARD_REFRESH_INTERVALS", "60,300,600,1800,3600,43200,86400"))) -QUERY_REFRESH_INTERVALS = map(int, array_from_string(os.environ.get("REDASH_QUERY_REFRESH_INTERVALS", "60, 300, 600, 900, 1800, 3600, 7200, 10800, 14400, 18000, 21600, 25200, 28800, 32400, 36000, 39600, 43200, 86400, 604800, 1209600, 2592000"))) +DASHBOARD_REFRESH_INTERVALS = list(map(int, array_from_string(os.environ.get("REDASH_DASHBOARD_REFRESH_INTERVALS", "60,300,600,1800,3600,43200,86400")))) +QUERY_REFRESH_INTERVALS = list(map(int, array_from_string(os.environ.get("REDASH_QUERY_REFRESH_INTERVALS", "60, 300, 600, 900, 1800, 3600, 7200, 10800, 14400, 18000, 21600, 25200, 28800, 32400, 36000, 39600, 43200, 86400, 604800, 1209600, 2592000")))) PAGE_SIZE = int(os.environ.get('REDASH_PAGE_SIZE', 20)) -PAGE_SIZE_OPTIONS = map(int, array_from_string(os.environ.get("REDASH_PAGE_SIZE_OPTIONS", "5,10,20,50,100"))) +PAGE_SIZE_OPTIONS = list(map(int, array_from_string(os.environ.get("REDASH_PAGE_SIZE_OPTIONS", "5,10,20,50,100")))) TABLE_CELL_MAX_JSON_SIZE = int(os.environ.get('REDASH_TABLE_CELL_MAX_JSON_SIZE', 50000)) # Features: diff --git a/redash/settings/helpers.py b/redash/settings/helpers.py index 98946d81e4..03fd83c49f 100644 --- a/redash/settings/helpers.py +++ b/redash/settings/helpers.py @@ -1,4 +1,5 @@ import os +from urllib.parse import urlparse, urlunparse def fix_assets_path(path): @@ -34,3 +35,16 @@ def 
int_or_none(value): return value return int(value) + + +def add_decode_responses_to_redis_url(url): + """Make sure that the Redis URL includes the `decode_responses` option.""" + parsed = urlparse(url) + + query = 'decode_responses=True' + if parsed.query and 'decode_responses' not in parsed.query: + query = "{}&{}".format(parsed.query, query) + elif 'decode_responses' in parsed.query: + query = parsed.query + + return urlunparse([parsed.scheme, parsed.netloc, parsed.path, parsed.params, query, parsed.fragment]) diff --git a/redash/settings/organization.py b/redash/settings/organization.py index 853a6cd4ec..4aa9b4b1f2 100644 --- a/redash/settings/organization.py +++ b/redash/settings/organization.py @@ -1,4 +1,4 @@ -from __future__ import print_function + import os from .helpers import parse_boolean diff --git a/redash/tasks/failure_report.py b/redash/tasks/failure_report.py index 3845dc06b3..398803ac3b 100644 --- a/redash/tasks/failure_report.py +++ b/redash/tasks/failure_report.py @@ -44,7 +44,7 @@ def send_failure_report(user_id): 'failure_reason': v.get('message'), 'failure_count': occurrences[k], 'comment': comment_for(v) - } for k, v in unique_errors.iteritems()], + } for k, v in unique_errors.items()], 'base_url': base_url(user.org) } diff --git a/redash/tasks/queries/execution.py b/redash/tasks/queries/execution.py index 90d58e002f..eead672319 100644 --- a/redash/tasks/queries/execution.py +++ b/redash/tasks/queries/execution.py @@ -60,7 +60,7 @@ def to_dict(self): error = TIMEOUT_MESSAGE status = 4 elif isinstance(result, Exception): - error = result.message + error = str(result) status = 4 elif task_status == 'REVOKED': error = 'Query execution cancelled.' @@ -227,7 +227,7 @@ def run(self): run_time = time.time() - started_at - logger.info(u"task=execute_query query_hash=%s data_length=%s error=[%s]", + logger.info("task=execute_query query_hash=%s data_length=%s error=[%s]", self.query_hash, data and len(data), error) _unlock(self.query_hash, self.data_source.id) @@ -248,7 +248,7 @@ def run(self): self.data_source.org_id, self.data_source, self.query_hash, self.query, data, run_time, utcnow()) - + updated_query_ids = models.Query.update_latest_result(query_result) models.db.session.commit() # make sure that alert sees the latest query result @@ -271,7 +271,7 @@ def _annotate_query(self, query_runner): def _log_progress(self, state): logger.info( - u"task=execute_query state=%s query_hash=%s type=%s ds_id=%d " + "task=execute_query state=%s query_hash=%s type=%s ds_id=%d " "task_id=%s queue=%s query_id=%s username=%s", state, self.query_hash, self.data_source.type, self.data_source.id, self.task.request.id, diff --git a/redash/tasks/queries/maintenance.py b/redash/tasks/queries/maintenance.py index ea78667565..40d3155ed7 100644 --- a/redash/tasks/queries/maintenance.py +++ b/redash/tasks/queries/maintenance.py @@ -50,7 +50,7 @@ def refresh_queries(): try: query_text = query.parameterized.apply(parameters).query except InvalidParameterError as e: - error = u"Skipping refresh of {} because of invalid parameters: {}".format(query.id, e.message) + error = u"Skipping refresh of {} because of invalid parameters: {}".format(query.id, str(e)) track_failure(query, error) continue except QueryDetachedFromDataSourceError as e: diff --git a/redash/utils/__init__.py b/redash/utils/__init__.py index c430977aef..3302a41587 100644 --- a/redash/utils/__init__.py +++ b/redash/utils/__init__.py @@ -1,5 +1,5 @@ import codecs -import cStringIO +import io import csv import datetime import decimal @@ 
-22,10 +22,6 @@ from .human_time import parse_human_time -try: - buffer -except NameError: - buffer = bytes COMMENTS_REGEX = re.compile("/\*.*?\*/") WRITER_ENCODING = os.environ.get('REDASH_CSV_WRITER_ENCODING', 'utf-8') @@ -102,8 +98,10 @@ def default(self, o): result = o.isoformat() if o.microsecond: result = result[:12] - elif isinstance(o, buffer): - result = binascii.hexlify(o) + elif isinstance(o, memoryview): + result = binascii.hexlify(o).decode() + elif isinstance(o, bytes): + result = binascii.hexlify(o).decode() else: result = super(JSONEncoder, self).default(o) return result @@ -119,6 +117,7 @@ def json_dumps(data, *args, **kwargs): """A custom JSON dumping function which passes all parameters to the simplejson.dumps function.""" kwargs.setdefault('cls', JSONEncoder) + kwargs.setdefault('encoding', None) return simplejson.dumps(data, *args, **kwargs) @@ -145,7 +144,7 @@ class UnicodeWriter: def __init__(self, f, dialect=csv.excel, encoding=WRITER_ENCODING, **kwds): # Redirect output to a queue - self.queue = cStringIO.StringIO() + self.queue = io.StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f self.encoder = codecs.getincrementalencoder(encoding)() @@ -176,7 +175,7 @@ def writerows(self, rows): def collect_parameters_from_request(args): parameters = {} - for k, v in args.iteritems(): + for k, v in args.items(): if k.startswith('p_'): parameters[k[2:]] = v diff --git a/redash/utils/compat.py b/redash/utils/compat.py deleted file mode 100644 index cb4ebfb8ab..0000000000 --- a/redash/utils/compat.py +++ /dev/null @@ -1,4 +0,0 @@ -try: - long = long -except NameError: - long = int diff --git a/redash/utils/configuration.py b/redash/utils/configuration.py index 41625d6468..c75d27a6c1 100644 --- a/redash/utils/configuration.py +++ b/redash/utils/configuration.py @@ -55,7 +55,7 @@ def to_json(self): return json_dumps(self._config, sort_keys=True) def iteritems(self): - return self._config.iteritems() + return self._config.items() def to_dict(self, mask_secrets=False): if mask_secrets is False or 'secret' not in self.schema: @@ -72,7 +72,7 @@ def update(self, new_config): jsonschema.validate(new_config, self.schema) config = {} - for k, v in new_config.iteritems(): + for k, v in new_config.items(): if k in self.schema.get('secret', []) and v == SECRET_PLACEHOLDER: config[k] = self[k] else: diff --git a/redash/worker.py b/redash/worker.py index 159fcff291..6e3e68b5fc 100644 --- a/redash/worker.py +++ b/redash/worker.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import + from datetime import timedelta from functools import partial diff --git a/requirements.txt b/requirements.txt index ff73441510..771c2e5c60 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,61 +1,60 @@ -Flask==0.12.4 -Werkzeug==0.11.11 -Jinja2==2.8 -itsdangerous==0.24 -click==6.6 -MarkupSafe==0.23 -pyOpenSSL==17.5.0 -httplib2==0.10.3 +Flask==1.1.1 +Jinja2==2.10.3 +itsdangerous==1.1.0 +click==6.7 +MarkupSafe==1.1.1 +pyOpenSSL==19.0.0 +httplib2==0.14.0 wtforms==2.2.1 -Flask-RESTful==0.3.5 -Flask-Login==0.4.0 +Flask-RESTful==0.3.7 +Flask-Login==0.4.1 Flask-OAuthLib==0.9.5 # pin this until https://github.com/lepture/flask-oauthlib/pull/388 is released requests-oauthlib>=0.6.2,<1.2.0 -Flask-SQLAlchemy==2.3.2 -Flask-Migrate==2.0.1 +Flask-SQLAlchemy==2.4.1 +Flask-Migrate==2.5.2 flask-mail==0.9.1 -flask-talisman==0.6.0 +flask-talisman==0.7.0 Flask-Limiter==0.9.3 -passlib==1.6.2 -aniso8601==1.1.0 -blinker==1.3 -psycopg2==2.7.3.2 +passlib==1.7.1 +aniso8601==8.0.0 +blinker==1.4 
+psycopg2==2.8.3 python-dateutil==2.8.0 -pytz==2016.7 -PyYAML==3.12 -redis==3.2.1 -requests==2.21.0 +pytz>=2019.3 +PyYAML==5.1.2 +redis==3.3.11 +requests==2.22.0 six==1.12.0 -SQLAlchemy==1.2.12 +SQLAlchemy==1.3.10 # We can't upgrade SQLAlchemy-Searchable version as newer versions require PostgreSQL > 9.6, but we target older versions at the moment. SQLAlchemy-Searchable==0.10.6 # We need to pin the version of pyparsing, as newer versions break SQLAlchemy-Searchable-10.0.6 (newer versions no longer depend on it) pyparsing==2.3.0 -SQLAlchemy-Utils==0.33.11 -sqlparse==0.2.4 -statsd==2.1.2 -gunicorn==19.7.1 +SQLAlchemy-Utils==0.34.2 +sqlparse==0.3.0 +statsd==3.3.0 +gunicorn==19.9.0 rq==1.1.0 -rq-scheduler==0.9 +rq-scheduler==0.9.1 celery==4.3.0 -kombu==4.6.3 -jsonschema==2.4.0 -RestrictedPython==3.6.0 -pysaml2==4.5.0 +kombu==4.6.5 +jsonschema==3.1.1 +RestrictedPython==5.0 +pysaml2==4.8.0 pycrypto==2.6.1 -funcy==1.7.1 +funcy==1.13 sentry-sdk>=0.12.2,<0.13.0 -semver==2.2.1 -xlsxwriter==0.9.3 +semver==2.8.1 +xlsxwriter==1.2.2 pystache==0.5.4 -parsedatetime==2.1 -PyJWT==1.6.4 -cryptography==2.3 -simplejson==3.10.0 -ua-parser==0.7.3 -user-agents==1.1.0 -python-geoip-geolite2==2015.303 +parsedatetime==2.4 +PyJWT==1.7.1 +cryptography==2.7 +simplejson==3.16.0 +ua-parser==0.8.0 +user-agents==2.0 +maxminddb-geolite2==2018.703 chromelogger==0.4.3 pypd==1.1.0 disposable-email-domains>=0.0.52 diff --git a/requirements_all_ds.txt b/requirements_all_ds.txt index 45adfaae0d..05cc8298d9 100644 --- a/requirements_all_ds.txt +++ b/requirements_all_ds.txt @@ -1,34 +1,34 @@ -google-api-python-client==1.5.1 +google-api-python-client==1.7.11 gspread==3.1.0 -impyla==0.10.0 -influxdb==2.7.1 +impyla==0.16.0 +influxdb==5.2.3 mysqlclient==1.3.14 -oauth2client==3.0.0 -pyhive==0.5.1 -pymongo[tls,srv]==3.6.1 -vertica-python==0.8.0 -td-client==0.8.0 -pymssql==2.1.3 -dql==0.5.24 -dynamo3==0.4.7 -boto3==1.9.115 -botocore>=1.12.239,<1.13.0 +oauth2client==4.1.3 +pyhive==0.6.1 +pymongo[tls,srv]==3.9.0 +vertica-python==0.9.5 +td-client==1.0.0 +pymssql==2.1.4 +dql==0.5.26 +dynamo3==0.4.10 +boto3>=1.9.241,<1.10.0 +botocore>=1.12.241,<1.13.0 sasl>=0.1.3 thrift>=0.8.0 thrift_sasl>=0.1.0 -cassandra-driver==3.11.0 +cassandra-driver==3.19.0 memsql==3.0.0 -atsd_client==2.0.12 -simple_salesforce==0.72.2 +atsd_client==3.0.5 +simple_salesforce==0.74.3 PyAthena>=1.5.0 -pymapd==0.7.1 +pymapd==0.16.0 qds-sdk>=1.9.6 ibm-db>=2.0.9 -pydruid==0.4 -requests_aws_sign==0.1.4 -snowflake_connector_python==1.8.6 +pydruid==0.5.5 +requests_aws_sign==0.1.5 +snowflake_connector_python==2.0.0 phoenixdb==0.7 # certifi is needed to support MongoDB and SSL: -certifi -pydgraph==1.2.0 -azure-kusto-data==0.0.32 +certifi>=2019.9.11 +pydgraph==2.0.2 +azure-kusto-data==0.0.35 diff --git a/requirements_bundles.txt b/requirements_bundles.txt index 8bab330057..39d635ee28 100644 --- a/requirements_bundles.txt +++ b/requirements_bundles.txt @@ -6,4 +6,3 @@ # These can be removed when upgrading to Python 3.x importlib-metadata>=0.19 # remove when on 3.8 importlib_resources==1.0.2 # remove when on 3.7 -pathlib2==2.3.3 # remove when on 3.x diff --git a/requirements_dev.txt b/requirements_dev.txt index 3575c5bf70..14d803d005 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,13 +1,13 @@ -pytest==3.2.3 -pytest-cov==2.5.1 -coverage==4.0.3 -mock==2.0.0 +pytest==5.2.1 +pytest-cov==2.8.1 +coverage==4.5.4 +mock==3.0.5 # PyMongo and Athena dependencies are needed for some of the unit tests: # (this is not perfect and we should resolve this in a different way) 
-pymongo[tls,srv]==3.6.1 -botocore>=1.12.239,<1.13.0 +pymongo[srv,tls]==3.9.0 +botocore>=1.12.241,<1.13.0 PyAthena>=1.5.0 -ptvsd==4.2.3 -freezegun==0.3.11 +ptvsd==4.3.2 +freezegun==0.3.12 watchdog==0.9.0 diff --git a/tests/__init__.py b/tests/__init__.py index 9d0f0832b2..566052c2af 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -23,8 +23,8 @@ from tests.factories import Factory, user_factory -logging.disable("INFO") -logging.getLogger("metrics").setLevel("ERROR") +logging.disable(logging.INFO) +logging.getLogger("metrics").setLevel(logging.ERROR) def authenticate_request(c, user): @@ -47,7 +47,6 @@ def setUp(self): self.app = create_app() self.db = db self.app.config['TESTING'] = True - self.app.config['SERVER_NAME'] = 'localhost' limiter.enabled = False self.app_ctx = self.app.app_context() self.app_ctx.push() @@ -95,10 +94,6 @@ def make_request(self, method, path, org=None, user=None, data=None, content_type=content_type, follow_redirects=follow_redirects, ) - - if response.data and is_json: - response.json = json_loads(response.data) - return response def get_request(self, path, org=None, headers=None): @@ -114,7 +109,7 @@ def post_request(self, path, data=None, org=None, headers=None): return self.client.post(path, data=data, headers=headers) def assertResponseEqual(self, expected, actual): - for k, v in expected.iteritems(): + for k, v in expected.items(): if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime): continue diff --git a/tests/factories.py b/tests/factories.py index 2c82e186da..2915cc7d4a 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -41,7 +41,7 @@ def __call__(self): user_factory = ModelFactory(redash.models.User, - name='John Doe', email=Sequence(u'test{}@example.com'), + name='John Doe', email=Sequence('test{}@example.com'), password_hash=pwd_context.encrypt('test1234'), group_ids=[2], org_id=1) @@ -71,7 +71,7 @@ def __call__(self): query_factory = ModelFactory(redash.models.Query, name='Query', description='', - query_text=u'SELECT 1', + query_text='SELECT 1', user=user_factory.create, is_archived=False, is_draft=False, diff --git a/tests/handlers/test_dashboards.py b/tests/handlers/test_dashboards.py index 08e0c5cce2..4207cb2bde 100644 --- a/tests/handlers/test_dashboards.py +++ b/tests/handlers/test_dashboards.py @@ -10,10 +10,10 @@ class TestDashboardListResource(BaseTestCase): def test_create_new_dashboard(self): dashboard_name = 'Test Dashboard' rv = self.make_request('post', '/api/dashboards', data={'name': dashboard_name}) - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.json['name'], 'Test Dashboard') - self.assertEquals(rv.json['user_id'], self.factory.user.id) - self.assertEquals(rv.json['layout'], []) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['name'], 'Test Dashboard') + self.assertEqual(rv.json['user_id'], self.factory.user.id) + self.assertEqual(rv.json['layout'], []) class TestDashboardListGetResource(BaseTestCase): @@ -25,16 +25,16 @@ def test_returns_dashboards(self): rv = self.make_request('get', '/api/dashboards') assert len(rv.json['results']) == 3 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id, d3.id]) + assert set([result['id'] for result in rv.json['results']]) == set([d1.id, d2.id, d3.id]) def test_filters_with_tags(self): - d1 = self.factory.create_dashboard(tags=[u'test']) + d1 = self.factory.create_dashboard(tags=['test']) d2 = self.factory.create_dashboard() d3 = self.factory.create_dashboard() rv = 
self.make_request('get', '/api/dashboards?tags=test') assert len(rv.json['results']) == 1 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id]) + assert set([result['id'] for result in rv.json['results']]) == set([d1.id]) def test_search_term(self): d1 = self.factory.create_dashboard(name="Sales") @@ -43,14 +43,14 @@ def test_search_term(self): rv = self.make_request('get', '/api/dashboards?q=sales') assert len(rv.json['results']) == 2 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id]) + assert set([result['id'] for result in rv.json['results']]) == set([d1.id, d2.id]) class TestDashboardResourceGet(BaseTestCase): def test_get_dashboard(self): d1 = self.factory.create_dashboard() rv = self.make_request('get', '/api/dashboards/{0}'.format(d1.slug)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False) actual = json_loads(rv.data) @@ -69,13 +69,13 @@ def test_get_dashboard_filters_unauthorized_widgets(self): db.session.commit() rv = self.make_request('get', '/api/dashboards/{0}'.format(dashboard.slug)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertTrue(rv.json['widgets'][0]['restricted']) self.assertNotIn('restricted', rv.json['widgets'][1]) def test_get_non_existing_dashboard(self): rv = self.make_request('get', '/api/dashboards/not_existing') - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) class TestDashboardResourcePost(BaseTestCase): @@ -84,8 +84,8 @@ def test_update_dashboard(self): new_name = 'New Name' rv = self.make_request('post', '/api/dashboards/{0}'.format(d.id), data={'name': new_name, 'layout': '[]'}) - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.json['name'], new_name) + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['name'], new_name) def test_raises_error_in_case_of_conflict(self): d = self.factory.create_dashboard() @@ -130,7 +130,7 @@ def test_delete_dashboard(self): d = self.factory.create_dashboard() rv = self.make_request('delete', '/api/dashboards/{0}'.format(d.slug)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) d = Dashboard.get_by_slug_and_org(d.slug, d.org) self.assertTrue(d.is_archived) diff --git a/tests/handlers/test_data_sources.py b/tests/handlers/test_data_sources.py index af0347a286..4aa0b4fc61 100644 --- a/tests/handlers/test_data_sources.py +++ b/tests/handlers/test_data_sources.py @@ -31,7 +31,8 @@ def test_returns_data_sources_ordered_by_id(self): self.factory.create_data_source(group=self.factory.org.default_group) self.factory.create_data_source(group=self.factory.org.default_group) response = self.make_request("get", "/api/data_sources", user=self.factory.user) - self.assertTrue(all(left <= right for left, right in pairwise(response.json))) + ids = [datasource['id'] for datasource in response.json] + self.assertTrue(all(left <= right for left, right in pairwise(ids))) class DataSourceTypesTest(BaseTestCase): @@ -45,7 +46,7 @@ def test_does_not_show_deprecated_types(self): with patch.object(PostgreSQL, 'deprecated', return_value=True): rv = self.make_request('get', "/api/data_sources/types", user=admin) - types = map(lambda x: x['type'], rv.json) + types = [datasource_type['type'] for datasource_type in rv.json] self.assertNotIn('pg', types) def test_returns_403_for_non_admin(self): diff --git a/tests/handlers/test_destinations.py 
b/tests/handlers/test_destinations.py index f4fb816885..6736e936b7 100644 --- a/tests/handlers/test_destinations.py +++ b/tests/handlers/test_destinations.py @@ -80,7 +80,7 @@ def test_post(self): d = NotificationDestination.query.get(d.id) self.assertEqual(d.name, data['name']) self.assertEqual(d.options['url'], data['options']['url']) - + class DestinationTypesTest(BaseTestCase): def test_does_not_show_deprecated_types(self): @@ -88,5 +88,5 @@ def test_does_not_show_deprecated_types(self): with patch.object(Slack, 'deprecated', return_value=True): rv = self.make_request('get', "/api/destinations/types", user=admin) - types = map(lambda x: x['type'], rv.json) - self.assertNotIn('slack', types) \ No newline at end of file + types = [destination_type['type'] for destination_type in rv.json] + self.assertNotIn('slack', types) diff --git a/tests/handlers/test_embed.py b/tests/handlers/test_embed.py index 9f471882bd..f4c90c3276 100644 --- a/tests/handlers/test_embed.py +++ b/tests/handlers/test_embed.py @@ -6,7 +6,7 @@ class TestUnembedables(BaseTestCase): def test_not_embedable(self): query = self.factory.create_query() res = self.make_request('get', '/api/queries/{0}'.format(query.id)) - self.assertEquals(res.status_code, 200) + self.assertEqual(res.status_code, 200) self.assertIn("frame-ancestors 'none'", res.headers['Content-Security-Policy']) self.assertEqual(res.headers['X-Frame-Options'], 'deny') diff --git a/tests/handlers/test_queries.py b/tests/handlers/test_queries.py index cfd51e3907..89f39dd933 100644 --- a/tests/handlers/test_queries.py +++ b/tests/handlers/test_queries.py @@ -12,7 +12,7 @@ def test_get_query(self): rv = self.make_request('get', '/api/queries/{0}'.format(query.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) expected = serialize_query(query, with_visualizations=True) expected['can_edit'] = True expected['is_favorite'] = False @@ -22,8 +22,8 @@ def test_get_all_queries(self): [self.factory.create_query() for _ in range(10)] rv = self.make_request('get', '/api/queries') - self.assertEquals(rv.status_code, 200) - self.assertEquals(len(rv.json['results']), 10) + self.assertEqual(rv.status_code, 200) + self.assertEqual(len(rv.json['results']), 10) def test_query_without_data_source_should_be_available_only_by_admin(self): query = self.factory.create_query() @@ -31,10 +31,10 @@ def test_query_without_data_source_should_be_available_only_by_admin(self): db.session.add(query) rv = self.make_request('get', '/api/queries/{}'.format(query.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) rv = self.make_request('get', '/api/queries/{}'.format(query.id), user=self.factory.create_admin()) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_query_only_accessible_to_users_from_its_organization(self): second_org = self.factory.create_org() @@ -45,10 +45,10 @@ def test_query_only_accessible_to_users_from_its_organization(self): db.session.add(query) rv = self.make_request('get', '/api/queries/{}'.format(query.id), user=second_org_admin) - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) rv = self.make_request('get', '/api/queries/{}'.format(query.id), user=self.factory.create_admin()) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_query_search(self): names = [ @@ -62,22 +62,22 @@ def test_query_search(self): rv = self.make_request('get', '/api/queries?q=better') - self.assertEquals(rv.status_code, 
200) - self.assertEquals(len(rv.json['results']), 1) + self.assertEqual(rv.status_code, 200) + self.assertEqual(len(rv.json['results']), 1) rv = self.make_request('get', '/api/queries?q=better OR faster') - self.assertEquals(rv.status_code, 200) - self.assertEquals(len(rv.json['results']), 2) + self.assertEqual(rv.status_code, 200) + self.assertEqual(len(rv.json['results']), 2) # test the old search API and that it redirects to the new one rv = self.make_request('get', '/api/queries/search?q=stronger') - self.assertEquals(rv.status_code, 301) + self.assertEqual(rv.status_code, 301) self.assertIn('/api/queries?q=stronger', rv.headers['Location']) rv = self.make_request('get', '/api/queries/search?q=stronger', follow_redirects=True) - self.assertEquals(rv.status_code, 200) - self.assertEquals(len(rv.json['results']), 1) + self.assertEqual(rv.status_code, 200) + self.assertEqual(len(rv.json['results']), 1) class TestQueryResourcePost(BaseTestCase): @@ -201,16 +201,16 @@ def test_returns_queries(self): rv = self.make_request('get', '/api/queries') assert len(rv.json['results']) == 3 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([q1.id, q2.id, q3.id]) + assert set([result['id'] for result in rv.json['results']]) == set([q1.id, q2.id, q3.id]) def test_filters_with_tags(self): - q1 = self.factory.create_query(tags=[u'test']) + q1 = self.factory.create_query(tags=['test']) self.factory.create_query() self.factory.create_query() rv = self.make_request('get', '/api/queries?tags=test') assert len(rv.json['results']) == 1 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([q1.id]) + assert set([result['id'] for result in rv.json['results']]) == set([q1.id]) def test_search_term(self): q1 = self.factory.create_query(name="Sales") @@ -219,7 +219,7 @@ def test_search_term(self): rv = self.make_request('get', '/api/queries?q=sales') assert len(rv.json['results']) == 2 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([q1.id, q2.id]) + assert set([result['id'] for result in rv.json['results']]) == set([q1.id, q2.id]) class TestQueryListResourcePost(BaseTestCase): @@ -233,14 +233,14 @@ def test_create_query(self): rv = self.make_request('post', '/api/queries', data=query_data) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertDictContainsSubset(query_data, rv.json) - self.assertEquals(rv.json['user']['id'], self.factory.user.id) + self.assertEqual(rv.json['user']['id'], self.factory.user.id) self.assertIsNotNone(rv.json['api_key']) self.assertIsNotNone(rv.json['query_hash']) query = models.Query.query.get(rv.json['id']) - self.assertEquals(len(list(query.visualizations)), 1) + self.assertEqual(len(list(query.visualizations)), 1) self.assertTrue(query.is_draft) def test_allows_association_with_authorized_dropdown_queries(self): @@ -320,7 +320,7 @@ def test_returns_queries(self): rv = self.make_request('get', '/api/queries/archive') assert len(rv.json['results']) == 2 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([q1.id, q2.id]) + assert set([result['id'] for result in rv.json['results']]) == set([q1.id, q2.id]) def test_search_term(self): q1 = self.factory.create_query(name="Sales", is_archived=True) @@ -329,7 +329,7 @@ def test_search_term(self): rv = self.make_request('get', '/api/queries/archive?q=sales') assert len(rv.json['results']) == 2 - assert set(map(lambda d: d['id'], rv.json['results'])) == set([q1.id, q2.id]) + assert set([result['id'] for result in rv.json['results']]) == set([q1.id, q2.id]) 
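Aside on the map()-to-list-comprehension hunks above and below (an illustration only, not Redash code): on Python 3, map() returns a one-shot iterator rather than a list, so any result that is indexed, passed to len(), or iterated more than once silently breaks; the comprehensions build an eager list instead. The same applies to dict.iteritems(), which is gone in favour of the .items() view.

    results = [{'id': 3}, {'id': 1}]

    ids = map(lambda d: d['id'], results)   # py3: a lazy <map object>
    assert list(ids) == [3, 1]
    assert list(ids) == []                  # exhausted after one pass

    ids = [d['id'] for d in results]        # eager list, as the tests now build
    assert len(ids) == 2 and set(ids) == {3, 1}

    for k, v in {'a': 1}.items():           # py3: .iteritems() is gone; .items() is a view
        assert (k, v) == ('a', 1)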
class QueryRefreshTest(BaseTestCase): @@ -344,14 +344,14 @@ def test_refresh_regular_query(self): self.assertEqual(200, response.status_code) def test_refresh_of_query_with_parameters(self): - self.query.query_text = u"SELECT {{param}}" + self.query.query_text = "SELECT {{param}}" db.session.add(self.query) response = self.make_request('post', "{}?p_param=1".format(self.path)) self.assertEqual(200, response.status_code) def test_refresh_of_query_with_parameters_without_parameters(self): - self.query.query_text = u"SELECT {{param}}" + self.query.query_text = "SELECT {{param}}" db.session.add(self.query) response = self.make_request('post', "{}".format(self.path)) @@ -384,7 +384,7 @@ def test_non_admin_cannot_regenerate_api_key_of_other_user(self): self.assertEqual(rv.status_code, 403) reloaded_query = models.Query.query.get(query.id) - self.assertEquals(orig_api_key, reloaded_query.api_key) + self.assertEqual(orig_api_key, reloaded_query.api_key) def test_admin_can_regenerate_api_key_of_other_user(self): query_creator = self.factory.create_user() @@ -396,7 +396,7 @@ def test_admin_can_regenerate_api_key_of_other_user(self): self.assertEqual(rv.status_code, 200) reloaded_query = models.Query.query.get(query.id) - self.assertNotEquals(orig_api_key, reloaded_query.api_key) + self.assertNotEqual(orig_api_key, reloaded_query.api_key) def test_admin_can_regenerate_api_key_of_myself(self): query_creator = self.factory.create_user() @@ -408,7 +408,7 @@ def test_admin_can_regenerate_api_key_of_myself(self): self.assertEqual(rv.status_code, 200) updated_query = models.Query.query.get(query.id) - self.assertNotEquals(orig_api_key, updated_query.api_key) + self.assertNotEqual(orig_api_key, updated_query.api_key) def test_user_can_regenerate_api_key_of_myself(self): user = self.factory.create_user() @@ -419,7 +419,7 @@ def test_user_can_regenerate_api_key_of_myself(self): self.assertEqual(rv.status_code, 200) updated_query = models.Query.query.get(query.id) - self.assertNotEquals(orig_api_key, updated_query.api_key) + self.assertNotEqual(orig_api_key, updated_query.api_key) class TestQueryForkResourcePost(BaseTestCase): diff --git a/tests/handlers/test_query_results.py b/tests/handlers/test_query_results.py index e54e12648c..a5e50a0a09 100644 --- a/tests/handlers/test_query_results.py +++ b/tests/handlers/test_query_results.py @@ -35,8 +35,8 @@ def test_get_existing_result(self): rv = self.make_request('post', '/api/query_results', data={'data_source_id': self.factory.data_source.id, 'query': query.query_text}) - self.assertEquals(rv.status_code, 200) - self.assertEquals(query_result.id, rv.json['query_result']['id']) + self.assertEqual(rv.status_code, 200) + self.assertEqual(query_result.id, rv.json['query_result']['id']) def test_execute_new_query(self): query_result = self.factory.create_query_result() @@ -47,7 +47,7 @@ def test_execute_new_query(self): 'query': query.query_text, 'max_age': 0}) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertNotIn('query_result', rv.json) self.assertIn('job', rv.json) @@ -63,7 +63,7 @@ def test_execute_query_without_access(self): 'max_age': 0}, user=user) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.assertIn('job', rv.json) def test_execute_query_with_params(self): @@ -74,7 +74,7 @@ def test_execute_query_with_params(self): 'query': query, 'max_age': 0}) - self.assertEquals(rv.status_code, 400) + self.assertEqual(rv.status_code, 400) self.assertIn('job', rv.json) rv = 
self.make_request('post', '/api/query_results', @@ -83,7 +83,7 @@ def test_execute_query_with_params(self): 'parameters': {'param': 1}, 'max_age': 0}) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertIn('job', rv.json) rv = self.make_request('post', '/api/query_results?p_param=1', @@ -91,7 +91,7 @@ def test_execute_query_with_params(self): 'query': query, 'max_age': 0}) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertIn('job', rv.json) def test_execute_on_paused_data_source(self): @@ -102,7 +102,7 @@ def test_execute_on_paused_data_source(self): 'query': 'SELECT 1', 'max_age': 0}) - self.assertEquals(rv.status_code, 400) + self.assertEqual(rv.status_code, 400) self.assertNotIn('query_result', rv.json) self.assertIn('job', rv.json) @@ -111,7 +111,7 @@ def test_execute_without_data_source(self): data={'query': 'SELECT 1', 'max_age': 0}) - self.assertEquals(rv.status_code, 401) + self.assertEqual(rv.status_code, 401) self.assertDictEqual(rv.json, error_messages['select_data_source'][0]) @@ -121,28 +121,28 @@ def test_has_no_access_to_data_source(self): query_result = self.factory.create_query_result(data_source=ds) rv = self.make_request('get', '/api/query_results/{}'.format(query_result.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) def test_has_view_only_access_to_data_source(self): ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=True) query_result = self.factory.create_query_result(data_source=ds) rv = self.make_request('get', '/api/query_results/{}'.format(query_result.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_has_full_access_to_data_source(self): ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=False) query_result = self.factory.create_query_result(data_source=ds) rv = self.make_request('get', '/api/query_results/{}'.format(query_result.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_execute_new_query(self): query = self.factory.create_query() rv = self.make_request('post', '/api/queries/{}/results'.format(query.id), data={'parameters': {}}) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertIn('job', rv.json) def test_execute_but_has_no_access_to_data_source(self): @@ -150,7 +150,7 @@ def test_execute_but_has_no_access_to_data_source(self): query = self.factory.create_query(data_source=ds) rv = self.make_request('post', '/api/queries/{}/results'.format(query.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.assertDictEqual(rv.json, error_messages['no_permission'][0]) def test_execute_with_no_parameter_values(self): @@ -158,7 +158,7 @@ def test_execute_with_no_parameter_values(self): rv = self.make_request('post', '/api/queries/{}/results'.format(query.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) self.assertIn('job', rv.json) def test_prevents_execution_of_unsafe_queries_on_view_only_data_sources(self): @@ -166,7 +166,7 @@ def test_prevents_execution_of_unsafe_queries_on_view_only_data_sources(self): query = self.factory.create_query(data_source=ds, options={"parameters": [{"name": "foo", "type": "text"}]}) rv = self.make_request('post', '/api/queries/{}/results'.format(query.id), data={"parameters": {}}) - self.assertEquals(rv.status_code, 403) + 
self.assertEqual(rv.status_code, 403) self.assertDictEqual(rv.json, error_messages['unsafe_on_view_only'][0]) def test_allows_execution_of_safe_queries_on_view_only_data_sources(self): @@ -174,7 +174,7 @@ def test_allows_execution_of_safe_queries_on_view_only_data_sources(self): query = self.factory.create_query(data_source=ds, options={"parameters": [{"name": "foo", "type": "number"}]}) rv = self.make_request('post', '/api/queries/{}/results'.format(query.id), data={"parameters": {}}) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_prevents_execution_of_unsafe_queries_using_api_key(self): ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=True) @@ -182,7 +182,7 @@ def test_prevents_execution_of_unsafe_queries_using_api_key(self): data = {'parameters': {'foo': 'bar'}} rv = self.make_request('post', '/api/queries/{}/results?api_key={}'.format(query.id, query.api_key), data=data) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.assertDictEqual(rv.json, error_messages['unsafe_when_shared'][0]) def test_access_with_query_api_key(self): @@ -191,7 +191,7 @@ def test_access_with_query_api_key(self): query_result = self.factory.create_query_result(data_source=ds, query_text=query.query_text) rv = self.make_request('get', '/api/queries/{}/results/{}.json?api_key={}'.format(query.id, query_result.id, query.api_key), user=False) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_access_with_query_api_key_without_query_result_id(self): ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=False) @@ -200,7 +200,7 @@ def test_access_with_query_api_key_without_query_result_id(self): query.latest_query_data = query_result rv = self.make_request('get', '/api/queries/{}/results.json?api_key={}'.format(query.id, query.api_key), user=False) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_query_api_key_and_different_query_result(self): ds = self.factory.create_data_source(group=self.factory.org.default_group, view_only=False) @@ -208,7 +208,7 @@ def test_query_api_key_and_different_query_result(self): query_result2 = self.factory.create_query_result(data_source=ds, query_hash='something-different') rv = self.make_request('get', '/api/queries/{}/results/{}.json?api_key={}'.format(query.id, query_result2.id, query.api_key), user=False) - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) def test_signed_in_user_and_different_query_result(self): ds2 = self.factory.create_data_source(group=self.factory.org.admin_group, view_only=False) @@ -216,7 +216,7 @@ def test_signed_in_user_and_different_query_result(self): query_result2 = self.factory.create_query_result(data_source=ds2, query_hash='something-different') rv = self.make_request('get', '/api/queries/{}/results/{}.json'.format(query.id, query_result2.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) class TestQueryResultDropdownResource(BaseTestCase): @@ -226,7 +226,7 @@ def test_checks_for_access_to_the_query(self): rv = self.make_request('get', '/api/queries/{}/dropdown'.format(query.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) class TestQueryDropdownsResource(BaseTestCase): @@ -240,7 +240,7 @@ def test_prevents_access_if_unassociated_and_doesnt_have_access(self): rv = self.make_request('get', 
'/api/queries/{}/dropdowns/{}'.format(query.id, unrelated_dropdown_query.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) def test_allows_access_if_unassociated_but_user_has_access(self): query = self.factory.create_query() @@ -258,7 +258,7 @@ def test_allows_access_if_unassociated_but_user_has_access(self): rv = self.make_request('get', '/api/queries/{}/dropdowns/{}'.format(query.id, unrelated_dropdown_query.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_allows_access_if_associated_and_has_access_to_parent(self): query_result = self.factory.create_query_result() @@ -282,7 +282,7 @@ def test_allows_access_if_associated_and_has_access_to_parent(self): rv = self.make_request('get', '/api/queries/{}/dropdowns/{}'.format(query.id, dropdown_query.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_prevents_access_if_associated_and_doesnt_have_access_to_parent(self): ds2 = self.factory.create_data_source(group=self.factory.org.admin_group, view_only=False) @@ -300,7 +300,7 @@ def test_prevents_access_if_associated_and_doesnt_have_access_to_parent(self): rv = self.make_request('get', '/api/queries/{}/dropdowns/{}'.format(query.id, dropdown_query.id)) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) class TestQueryResultExcelResponse(BaseTestCase): @@ -309,7 +309,7 @@ def test_renders_excel_file(self): query_result = self.factory.create_query_result() rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_renders_excel_file_when_rows_have_missing_columns(self): query = self.factory.create_query() @@ -326,5 +326,5 @@ def test_renders_excel_file_when_rows_have_missing_columns(self): query_result = self.factory.create_query_result(data=json_dumps(data)) rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) diff --git a/tests/handlers/test_users.py b/tests/handlers/test_users.py index c5cb375220..0657e7eab1 100644 --- a/tests/handlers/test_users.py +++ b/tests/handlers/test_users.py @@ -112,7 +112,7 @@ class PlainObject(object): def make_request_and_return_ids(self, *args, **kwargs): rv = self.make_request(*args, **kwargs) - return map(lambda u: u['id'], rv.json['results']) + return [user['id'] for user in rv.json['results']] def assertUsersListMatches(self, actual_ids, expected_ids, unexpected_ids): actual_ids = set(actual_ids) @@ -305,7 +305,7 @@ def test_changing_email_does_not_end_current_session(self): current = sess['user_id'] # make sure the session's `user_id` has changed to reflect the new identity, thus not logging the user out - self.assertNotEquals(previous, current) + self.assertNotEqual(previous, current) def test_admin_can_change_user_groups(self): admin_user = self.factory.create_admin() @@ -409,8 +409,8 @@ def test_disabled_user_cannot_login(self): # login handler should not be called login_user_mock.assert_not_called() # check if error is raised - self.assertEquals(rv.status_code, 200) - self.assertIn('Wrong email or password', rv.data) + self.assertEqual(rv.status_code, 200) + self.assertIn('Wrong email or password', rv.data.decode()) def test_disabled_user_should_not_access_api(self): # Note: some API does not require user, so check the one 
which requires @@ -418,7 +418,7 @@ def test_disabled_user_should_not_access_api(self): # 1. create user; the user should have access to API user = self.factory.create_user() rv = self.make_request('get', '/api/dashboards', user=user) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) # 2. disable user; now API access should be forbidden user.disable() @@ -426,7 +426,7 @@ def test_disabled_user_should_not_access_api(self): self.db.session.commit() rv = self.make_request('get', '/api/dashboards', user=user) - self.assertNotEquals(rv.status_code, 200) + self.assertNotEqual(rv.status_code, 200) def test_disabled_user_should_not_receive_restore_password_email(self): admin_user = self.factory.create_admin() @@ -461,7 +461,7 @@ def test_non_admin_cannot_regenerate_other_user_api_key(self): self.assertEqual(rv.status_code, 200) other_user = models.User.query.get(other_user.id) - self.assertNotEquals(orig_api_key, other_user.api_key) + self.assertNotEqual(orig_api_key, other_user.api_key) def test_admin_can_regenerate_other_user_api_key(self): user1 = self.factory.create_user() @@ -472,7 +472,7 @@ def test_admin_can_regenerate_other_user_api_key(self): self.assertEqual(rv.status_code, 403) user = models.User.query.get(user2.id) - self.assertEquals(orig_user2_api_key, user.api_key) + self.assertEqual(orig_user2_api_key, user.api_key) def test_admin_can_regenerate_api_key_myself(self): admin_user = self.factory.create_admin() @@ -482,7 +482,7 @@ def test_admin_can_regenerate_api_key_myself(self): self.assertEqual(rv.status_code, 200) user = models.User.query.get(admin_user.id) - self.assertNotEquals(orig_api_key, user.api_key) + self.assertNotEqual(orig_api_key, user.api_key) def test_user_can_regenerate_api_key_myself(self): user = self.factory.create_user() @@ -492,4 +492,4 @@ def test_user_can_regenerate_api_key_myself(self): self.assertEqual(rv.status_code, 200) user = models.User.query.get(user.id) - self.assertNotEquals(orig_api_key, user.api_key) + self.assertNotEqual(orig_api_key, user.api_key) diff --git a/tests/handlers/test_visualizations.py b/tests/handlers/test_visualizations.py index e508806dca..e79058147e 100644 --- a/tests/handlers/test_visualizations.py +++ b/tests/handlers/test_visualizations.py @@ -17,7 +17,7 @@ def test_create_visualization(self): rv = self.make_request('post', '/api/visualizations', data=data) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) data.pop('query_id') self.assertDictContainsSubset(data, rv.json) @@ -26,16 +26,16 @@ def test_delete_visualization(self): models.db.session.commit() rv = self.make_request('delete', '/api/visualizations/{}'.format(visualization.id)) - self.assertEquals(rv.status_code, 200) - self.assertEquals(models.Visualization.query.count(), 0) + self.assertEqual(rv.status_code, 200) + self.assertEqual(models.Visualization.query.count(), 0) def test_update_visualization(self): visualization = self.factory.create_visualization() models.db.session.commit() rv = self.make_request('post', '/api/visualizations/{0}'.format(visualization.id), data={'name': 'After Update'}) - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.json['name'], 'After Update') + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['name'], 'After Update') def test_only_owner_collaborator_or_admin_can_create_visualization(self): query = self.factory.create_query() @@ -55,17 +55,17 @@ def test_only_owner_collaborator_or_admin_can_create_visualization(self): } rv = 
self.make_request('post', '/api/visualizations', data=data, user=admin) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) rv = self.make_request('post', '/api/visualizations', data=data, user=other_user) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.make_request('post', '/api/queries/{}/acl'.format(query.id), data={'access_type': 'modify', 'user_id': other_user.id}) rv = self.make_request('post', '/api/visualizations', data=data, user=other_user) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) rv = self.make_request('post', '/api/visualizations', data=data, user=admin_from_diff_org) - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) def test_only_owner_collaborator_or_admin_can_edit_visualization(self): vis = self.factory.create_visualization() @@ -82,17 +82,17 @@ def test_only_owner_collaborator_or_admin_can_edit_visualization(self): models.db.session.refresh(admin_from_diff_org) rv = self.make_request('post', path, user=admin, data=data) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) rv = self.make_request('post', path, user=other_user, data=data) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.make_request('post', '/api/queries/{}/acl'.format(vis.query_id), data={'access_type': 'modify', 'user_id': other_user.id}) rv = self.make_request('post', path, user=other_user, data=data) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) rv = self.make_request('post', path, user=admin_from_diff_org, data=data) - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) def test_only_owner_collaborator_or_admin_can_delete_visualization(self): vis = self.factory.create_visualization() @@ -108,26 +108,26 @@ def test_only_owner_collaborator_or_admin_can_delete_visualization(self): models.db.session.refresh(other_user) models.db.session.refresh(admin_from_diff_org) rv = self.make_request('delete', path, user=admin) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) vis = self.factory.create_visualization() models.db.session.commit() path = '/api/visualizations/{}'.format(vis.id) rv = self.make_request('delete', path, user=other_user) - self.assertEquals(rv.status_code, 403) + self.assertEqual(rv.status_code, 403) self.make_request('post', '/api/queries/{}/acl'.format(vis.query_id), data={'access_type': 'modify', 'user_id': other_user.id}) rv = self.make_request('delete', path, user=other_user) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) vis = self.factory.create_visualization() models.db.session.commit() path = '/api/visualizations/{}'.format(vis.id) rv = self.make_request('delete', path, user=admin_from_diff_org) - self.assertEquals(rv.status_code, 404) + self.assertEqual(rv.status_code, 404) def test_deleting_a_visualization_deletes_dashboard_widgets(self): vis = self.factory.create_visualization() diff --git a/tests/handlers/test_widgets.py b/tests/handlers/test_widgets.py index 702ef6f828..c8e8f6f299 100644 --- a/tests/handlers/test_widgets.py +++ b/tests/handlers/test_widgets.py @@ -20,7 +20,7 @@ def test_create_widget(self): vis = self.factory.create_visualization() rv = self.create_widget(dashboard, vis) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) def test_wont_create_widget_for_visualization_you_dont_have_access_to(self): dashboard = 
self.factory.create_dashboard() @@ -53,14 +53,14 @@ def test_create_text_widget(self): rv = self.make_request('post', '/api/widgets', data=data) - self.assertEquals(rv.status_code, 200) - self.assertEquals(rv.json['text'], 'Sample text.') + self.assertEqual(rv.status_code, 200) + self.assertEqual(rv.json['text'], 'Sample text.') def test_delete_widget(self): widget = self.factory.create_widget() rv = self.make_request('delete', '/api/widgets/{0}'.format(widget.id)) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) dashboard = models.Dashboard.get_by_slug_and_org(widget.dashboard.slug, widget.dashboard.org) - self.assertEquals(dashboard.widgets.count(), 0) + self.assertEqual(dashboard.widgets.count(), 0) diff --git a/tests/models/test_dashboards.py b/tests/models/test_dashboards.py index cf9595a17d..f168aaab3a 100644 --- a/tests/models/test_dashboards.py +++ b/tests/models/test_dashboards.py @@ -20,11 +20,11 @@ def create_tagged_dashboard(self, tags): return dashboard def test_all_tags(self): - self.create_tagged_dashboard(tags=[u'tag1']) - self.create_tagged_dashboard(tags=[u'tag1', u'tag2']) - self.create_tagged_dashboard(tags=[u'tag1', u'tag2', u'tag3']) + self.create_tagged_dashboard(tags=['tag1']) + self.create_tagged_dashboard(tags=['tag1', 'tag2']) + self.create_tagged_dashboard(tags=['tag1', 'tag2', 'tag3']) self.assertEqual( list(Dashboard.all_tags(self.factory.org, self.factory.user)), - [(u'tag1', 3), (u'tag2', 2), (u'tag3', 1)] + [('tag1', 3), ('tag2', 2), ('tag3', 1)] ) diff --git a/tests/models/test_parameterized_query.py b/tests/models/test_parameterized_query.py index b4ba635b8a..dc2ac0372d 100644 --- a/tests/models/test_parameterized_query.py +++ b/tests/models/test_parameterized_query.py @@ -8,36 +8,36 @@ class TestParameterizedQuery(TestCase): def test_returns_empty_list_for_regular_query(self): - query = ParameterizedQuery(u"SELECT 1") + query = ParameterizedQuery("SELECT 1") self.assertEqual(set([]), query.missing_params) def test_finds_all_params_when_missing(self): - query = ParameterizedQuery(u"SELECT {{param}} FROM {{table}}") + query = ParameterizedQuery("SELECT {{param}} FROM {{table}}") self.assertEqual(set(['param', 'table']), query.missing_params) def test_finds_all_params(self): - query = ParameterizedQuery(u"SELECT {{param}} FROM {{table}}").apply({ + query = ParameterizedQuery("SELECT {{param}} FROM {{table}}").apply({ 'param': 'value', 'table': 'value' }) self.assertEqual(set([]), query.missing_params) def test_deduplicates_params(self): - query = ParameterizedQuery(u"SELECT {{param}}, {{param}} FROM {{table}}").apply({ + query = ParameterizedQuery("SELECT {{param}}, {{param}} FROM {{table}}").apply({ 'param': 'value', 'table': 'value' }) self.assertEqual(set([]), query.missing_params) def test_handles_nested_params(self): - query = ParameterizedQuery(u"SELECT {{param}}, {{param}} FROM {{table}} -- {{#test}} {{nested_param}} {{/test}}").apply({ + query = ParameterizedQuery("SELECT {{param}}, {{param}} FROM {{table}} -- {{#test}} {{nested_param}} {{/test}}").apply({ 'param': 'value', 'table': 'value' }) self.assertEqual(set(['test', 'nested_param']), query.missing_params) def test_handles_objects(self): - query = ParameterizedQuery(u"SELECT * FROM USERS WHERE created_at between '{{ created_at.start }}' and '{{ created_at.end }}'").apply({ + query = ParameterizedQuery("SELECT * FROM USERS WHERE created_at between '{{ created_at.start }}' and '{{ created_at.end }}'").apply({ 'created_at': { 'start': 1, 'end': 2 @@ -63,9 +63,9 @@ 
def test_validates_text_parameters(self): schema = [{"name": "bar", "type": "text"}] query = ParameterizedQuery("foo {{bar}}", schema) - query.apply({"bar": u"baz"}) + query.apply({"bar": "baz"}) - self.assertEquals("foo baz", query.text) + self.assertEqual("foo baz", query.text) def test_raises_on_invalid_number_parameters(self): schema = [{"name": "bar", "type": "number"}] @@ -80,7 +80,7 @@ def test_validates_number_parameters(self): query.apply({"bar": 7}) - self.assertEquals("foo 7", query.text) + self.assertEqual("foo 7", query.text) def test_coerces_number_parameters(self): schema = [{"name": "bar", "type": "number"}] @@ -88,7 +88,7 @@ def test_coerces_number_parameters(self): query.apply({"bar": "3.14"}) - self.assertEquals("foo 3.14", query.text) + self.assertEqual("foo 3.14", query.text) def test_raises_on_invalid_date_parameters(self): schema = [{"name": "bar", "type": "date"}] @@ -110,7 +110,7 @@ def test_validates_date_parameters(self): query.apply({"bar": "2000-01-01 12:00:00"}) - self.assertEquals("foo 2000-01-01 12:00:00", query.text) + self.assertEqual("foo 2000-01-01 12:00:00", query.text) def test_raises_on_invalid_enum_parameters(self): schema = [{"name": "bar", "type": "enum", "enumOptions": ["baz", "qux"]}] @@ -144,7 +144,7 @@ def test_validates_enum_parameters(self): query.apply({"bar": "baz"}) - self.assertEquals("foo baz", query.text) + self.assertEqual("foo baz", query.text) def test_validates_enum_list_value_parameters(self): schema = [{ @@ -157,7 +157,7 @@ def test_validates_enum_list_value_parameters(self): query.apply({"bar": ["qux", "baz"]}) - self.assertEquals("foo 'qux','baz'", query.text) + self.assertEqual("foo 'qux','baz'", query.text) @patch('redash.models.parameterized_query.dropdown_values', return_value=[{"value": "1"}]) def test_validation_accepts_integer_values_for_dropdowns(self, _): @@ -166,7 +166,7 @@ def test_validation_accepts_integer_values_for_dropdowns(self, _): query.apply({"bar": 1}) - self.assertEquals("foo 1", query.text) + self.assertEqual("foo 1", query.text) @patch('redash.models.parameterized_query.dropdown_values') def test_raises_on_invalid_query_parameters(self, _): @@ -191,7 +191,7 @@ def test_validates_query_parameters(self, _): query.apply({"bar": "baz"}) - self.assertEquals("foo baz", query.text) + self.assertEqual("foo baz", query.text) def test_raises_on_invalid_date_range_parameters(self): schema = [{"name": "bar", "type": "date-range"}] @@ -206,7 +206,7 @@ def test_validates_date_range_parameters(self): query.apply({"bar": {"start": "2000-01-01 12:00:00", "end": "2000-12-31 12:00:00"}}) - self.assertEquals("foo 2000-01-01 12:00:00 2000-12-31 12:00:00", query.text) + self.assertEqual("foo 2000-01-01 12:00:00 2000-12-31 12:00:00", query.text) def test_raises_on_unexpected_param_types(self): schema = [{"name": "bar", "type": "burrito"}] @@ -238,21 +238,21 @@ def test_is_safe_if_not_expecting_any_parameters(self): "rows": [{"id": 5, "Name": "John", "Value": "John Doe"}]}) def test_dropdown_values_prefers_name_and_value_columns(self, _): values = dropdown_values(1, None) - self.assertEquals(values, [{"name": "John", "value": "John Doe"}]) + self.assertEqual(values, [{"name": "John", "value": "John Doe"}]) @patch('redash.models.parameterized_query._load_result', return_value={ "columns": [{"name": "id"}, {"name": "fish"}, {"name": "poultry"}], "rows": [{"fish": "Clown", "id": 5, "poultry": "Hen"}]}) def test_dropdown_values_compromises_for_first_column(self, _): values = dropdown_values(1, None) - self.assertEquals(values, 
[{"name": 5, "value": "5"}]) + self.assertEqual(values, [{"name": 5, "value": "5"}]) @patch('redash.models.parameterized_query._load_result', return_value={ "columns": [{"name": "ID"}, {"name": "fish"}, {"name": "poultry"}], "rows": [{"fish": "Clown", "ID": 5, "poultry": "Hen"}]}) def test_dropdown_supports_upper_cased_columns(self, _): values = dropdown_values(1, None) - self.assertEquals(values, [{"name": 5, "value": "5"}]) + self.assertEqual(values, [{"name": 5, "value": "5"}]) @patch('redash.models.Query.get_by_id_and_org', return_value=namedtuple('Query', 'data_source')(None)) def test_dropdown_values_raises_when_query_is_detached_from_data_source(self, _): diff --git a/tests/models/test_queries.py b/tests/models/test_queries.py index f1c45b7bf1..1aaeb64dbc 100644 --- a/tests/models/test_queries.py +++ b/tests/models/test_queries.py @@ -1,5 +1,3 @@ -# encoding: utf8 - from tests import BaseTestCase import datetime from redash.models import Query, QueryResult, Group, Event, db @@ -14,7 +12,7 @@ def test_changing_query_text_changes_hash(self): q.query_text = "SELECT 2;" db.session.flush() - self.assertNotEquals(old_hash, q.query_hash) + self.assertNotEqual(old_hash, q.query_hash) def create_tagged_query(self, tags): ds = self.factory.create_data_source(group=self.factory.default_group) @@ -22,31 +20,31 @@ def create_tagged_query(self, tags): return query def test_all_tags(self): - self.create_tagged_query(tags=[u'tag1']) - self.create_tagged_query(tags=[u'tag1', u'tag2']) - self.create_tagged_query(tags=[u'tag1', u'tag2', u'tag3']) + self.create_tagged_query(tags=['tag1']) + self.create_tagged_query(tags=['tag1', 'tag2']) + self.create_tagged_query(tags=['tag1', 'tag2', 'tag3']) self.assertEqual( list(Query.all_tags(self.factory.user)), - [(u'tag1', 3), (u'tag2', 2), (u'tag3', 1)] + [('tag1', 3), ('tag2', 2), ('tag3', 1)] ) def test_search_finds_in_name(self): - q1 = self.factory.create_query(name=u"Testing seåřċħ") - q2 = self.factory.create_query(name=u"Testing seåřċħing") - q3 = self.factory.create_query(name=u"Testing seå řċħ") - queries = list(Query.search(u"seåřċħ", [self.factory.default_group.id])) + q1 = self.factory.create_query(name="Testing seåřċħ") + q2 = self.factory.create_query(name="Testing seåřċħing") + q3 = self.factory.create_query(name="Testing seå řċħ") + queries = list(Query.search("seåřċħ", [self.factory.default_group.id])) self.assertIn(q1, queries) self.assertIn(q2, queries) self.assertNotIn(q3, queries) def test_search_finds_in_description(self): - q1 = self.factory.create_query(description=u"Testing seåřċħ") - q2 = self.factory.create_query(description=u"Testing seåřċħing") - q3 = self.factory.create_query(description=u"Testing seå řċħ") + q1 = self.factory.create_query(description="Testing seåřċħ") + q2 = self.factory.create_query(description="Testing seåřċħing") + q3 = self.factory.create_query(description="Testing seå řċħ") - queries = Query.search(u"seåřċħ", [self.factory.default_group.id]) + queries = Query.search("seåřċħ", [self.factory.default_group.id]) self.assertIn(q1, queries) self.assertIn(q2, queries) @@ -54,19 +52,19 @@ def test_search_finds_in_description(self): def test_search_finds_in_multi_byte_name_and_description(self): q1 = self.factory.create_query(name="日本語の名前テスト") - q2 = self.factory.create_query(description=u"日本語の説明文テスト") - q3 = self.factory.create_query(description=u"Testing search") + q2 = self.factory.create_query(description="日本語の説明文テスト") + q3 = self.factory.create_query(description="Testing search") - queries = 
Query.search(u"テスト", [self.factory.default_group.id], multi_byte_search=True) + queries = Query.search("テスト", [self.factory.default_group.id], multi_byte_search=True) self.assertIn(q1, queries) self.assertIn(q2, queries) self.assertNotIn(q3, queries) def test_search_by_id_returns_query(self): - q1 = self.factory.create_query(description=u"Testing search") - q2 = self.factory.create_query(description=u"Testing searching") - q3 = self.factory.create_query(description=u"Testing sea rch") + q1 = self.factory.create_query(description="Testing search") + q2 = self.factory.create_query(description="Testing searching") + q3 = self.factory.create_query(description="Testing sea rch") db.session.flush() queries = Query.search(str(q3.id), [self.factory.default_group.id]) @@ -75,20 +73,20 @@ def test_search_by_id_returns_query(self): self.assertNotIn(q2, queries) def test_search_by_number(self): - q = self.factory.create_query(description=u"Testing search 12345") + q = self.factory.create_query(description="Testing search 12345") db.session.flush() queries = Query.search('12345', [self.factory.default_group.id]) self.assertIn(q, queries) def test_search_respects_groups(self): - other_group = Group(org=self.factory.org, name=u"Other Group") + other_group = Group(org=self.factory.org, name="Other Group") db.session.add(other_group) ds = self.factory.create_data_source(group=other_group) - q1 = self.factory.create_query(description=u"Testing search", data_source=ds) - q2 = self.factory.create_query(description=u"Testing searching") - q3 = self.factory.create_query(description=u"Testing sea rch") + q1 = self.factory.create_query(description="Testing search", data_source=ds) + q2 = self.factory.create_query(description="Testing searching") + q3 = self.factory.create_query(description="Testing sea rch") queries = list(Query.search("Testing", [self.factory.default_group.id])) @@ -430,4 +428,4 @@ def test_doesnt_update_queries_with_different_data_source(self): self.assertEqual(query1.latest_query_data, query_result) self.assertEqual(query2.latest_query_data, query_result) - self.assertNotEqual(query3.latest_query_data, query_result) \ No newline at end of file + self.assertNotEqual(query3.latest_query_data, query_result) diff --git a/tests/models/test_query_results.py b/tests/models/test_query_results.py index 5608c23913..aa6b6adacf 100644 --- a/tests/models/test_query_results.py +++ b/tests/models/test_query_results.py @@ -1,4 +1,3 @@ -#encoding: utf8 import datetime from unittest import TestCase @@ -79,7 +78,7 @@ def test_updating_data_removes_cached_result(self): self.assertDictEqual(p.data, {"test": 1}) p.data = '{"test": 2}' self.assertDictEqual(p.data, {"test": 2}) - + @patch('redash.models.json_loads') def test_calls_json_loads_only_once(self, json_loads_patch): json_loads_patch.return_value = '1' @@ -88,4 +87,4 @@ def test_calls_json_loads_only_once(self, json_loads_patch): p.data = json_data a = p.data b = p.data - json_loads_patch.assert_called_once_with(json_data) \ No newline at end of file + json_loads_patch.assert_called_once_with(json_data) diff --git a/tests/models/test_users.py b/tests/models/test_users.py index f687c454ac..733bbe5afe 100644 --- a/tests/models/test_users.py +++ b/tests/models/test_users.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from tests import BaseTestCase, authenticated_user from redash import redis_connection @@ -13,7 +12,7 @@ def test_default_group_always_added(self): user.update_group_assignments(["g_unknown"]) db.session.refresh(user) - 
self.assertItemsEqual([user.org.default_group.id], user.group_ids) + self.assertCountEqual([user.org.default_group.id], user.group_ids) def test_update_group_assignments(self): user = self.factory.user @@ -22,34 +21,34 @@ def test_update_group_assignments(self): user.update_group_assignments(["g1"]) db.session.refresh(user) - self.assertItemsEqual([user.org.default_group.id, new_group.id], user.group_ids) + self.assertCountEqual([user.org.default_group.id, new_group.id], user.group_ids) class TestUserFindByEmail(BaseTestCase): def test_finds_users(self): - user = self.factory.create_user(email=u'test@example.com') - user2 = self.factory.create_user(email=u'test@example.com', org=self.factory.create_org()) + user = self.factory.create_user(email='test@example.com') + user2 = self.factory.create_user(email='test@example.com', org=self.factory.create_org()) users = User.find_by_email(user.email) self.assertIn(user, users) self.assertIn(user2, users) def test_finds_users_case_insensitive(self): - user = self.factory.create_user(email=u'test@example.com') + user = self.factory.create_user(email='test@example.com') - users = User.find_by_email(u'test@EXAMPLE.com') + users = User.find_by_email('test@EXAMPLE.com') self.assertIn(user, users) class TestUserGetByEmailAndOrg(BaseTestCase): def test_get_user_by_email_and_org(self): - user = self.factory.create_user(email=u'test@example.com') + user = self.factory.create_user(email='test@example.com') found_user = User.get_by_email_and_org(user.email, user.org) self.assertEqual(user, found_user) def test_get_user_by_email_and_org_case_insensitive(self): - user = self.factory.create_user(email=u'test@example.com') + user = self.factory.create_user(email='test@example.com') found_user = User.get_by_email_and_org("TEST@example.com", user.org) self.assertEqual(user, found_user) @@ -57,9 +56,9 @@ def test_get_user_by_email_and_org_case_insensitive(self): class TestUserSearch(BaseTestCase): def test_non_unicode_search_string(self): - user = self.factory.create_user(name=u'אריק') + user = self.factory.create_user(name='אריק') - assert user in User.search(User.all(user.org), term=u'א') + assert user in User.search(User.all(user.org), term='א') class TestUserRegenerateApiKey(BaseTestCase): @@ -70,7 +69,7 @@ def test_regenerate_api_key(self): # check committed by research user = User.query.get(user.id) - self.assertNotEquals(user.api_key, before_api_key) + self.assertNotEqual(user.api_key, before_api_key) class TestUserDetail(BaseTestCase): diff --git a/tests/query_runner/test_drill.py b/tests/query_runner/test_drill.py index 97d45d3f9f..9cfe7f1ecc 100644 --- a/tests/query_runner/test_drill.py +++ b/tests/query_runner/test_drill.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import datetime from unittest import TestCase @@ -16,7 +15,7 @@ def test_converts_booleans(self): self.assertEqual(convert_type('FALSE', TYPE_BOOLEAN), False) def test_converts_strings(self): - self.assertEqual(convert_type(u'Текст', TYPE_STRING), u'Текст') + self.assertEqual(convert_type('Текст', TYPE_STRING), 'Текст') self.assertEqual(convert_type(None, TYPE_STRING), '') self.assertEqual(convert_type('', TYPE_STRING), '') self.assertEqual(convert_type('redash', TYPE_STRING), 'redash') diff --git a/tests/query_runner/test_google_spreadsheets.py b/tests/query_runner/test_google_spreadsheets.py index 4848af0af3..ad236b883d 100644 --- a/tests/query_runner/test_google_spreadsheets.py +++ b/tests/query_runner/test_google_spreadsheets.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import datetime from 
unittest import TestCase @@ -11,7 +10,7 @@ class TestValueEvalList(TestCase): def test_handles_unicode(self): - values = [u'יוניקוד', 'test', 'value'] + values = ['יוניקוד', 'test', 'value'] self.assertEqual(values, _value_eval_list(values, [TYPE_STRING]*len(values))) def test_handles_boolean(self): @@ -66,7 +65,7 @@ def test_parse_worksheet_with_duplicate_column_names(self): worksheet = [['Column', 'Another Column', 'Column'], ['A', 'TRUE', '1'], ['B', 'FALSE', '2'], ['C', 'TRUE', '3'], ['D', 'FALSE', '4']] parsed = parse_worksheet(worksheet) - columns = map(lambda c: c['name'], parsed['columns']) + columns = [column['name'] for column in parsed['columns']] self.assertEqual('Column', columns[0]) self.assertEqual('Another Column', columns[1]) self.assertEqual('Column1', columns[2]) diff --git a/tests/query_runner/test_http.py b/tests/query_runner/test_http.py index 091d891812..c410e134f4 100644 --- a/tests/query_runner/test_http.py +++ b/tests/query_runner/test_http.py @@ -35,7 +35,7 @@ def test_get_auth_empty(self): def test_get_auth_empty_requires_authentication(self): query_runner = RequiresAuthQueryRunner({}) - self.assertRaisesRegexp( + self.assertRaisesRegex( ValueError, "Username and Password required", query_runner.get_auth @@ -128,7 +128,7 @@ def test_get_response_generic_exception(self, mock_get): url = 'https://example.com/' query_runner = BaseHTTPQueryRunner({}) - self.assertRaisesRegexp( + self.assertRaisesRegex( ValueError, exception_message, query_runner.get_response, diff --git a/tests/query_runner/test_query_results.py b/tests/query_runner/test_query_results.py index 8c3da3e387..30662a5c7d 100644 --- a/tests/query_runner/test_query_results.py +++ b/tests/query_runner/test_query_results.py @@ -12,19 +12,19 @@ class TestExtractQueryIds(TestCase): def test_works_with_simple_query(self): query = "SELECT 1" - self.assertEquals([], extract_query_ids(query)) + self.assertEqual([], extract_query_ids(query)) def test_finds_queries_to_load(self): query = "SELECT * FROM query_123" - self.assertEquals([123], extract_query_ids(query)) + self.assertEqual([123], extract_query_ids(query)) def test_finds_queries_in_joins(self): query = "SELECT * FROM query_123 JOIN query_4566" - self.assertEquals([123, 4566], extract_query_ids(query)) + self.assertEqual([123, 4566], extract_query_ids(query)) def test_finds_queries_with_whitespace_characters(self): query = "SELECT * FROM query_123 a JOIN\tquery_4566 b ON a.id=b.parent_id JOIN\r\nquery_78 c ON b.id=c.parent_id" - self.assertEquals([123, 4566, 78], extract_query_ids(query)) + self.assertEqual([123, 4566, 78], extract_query_ids(query)) class TestCreateTable(TestCase): @@ -136,12 +136,12 @@ def test_creates_table_with_non_ascii_in_column_name(self): connection = sqlite3.connect(':memory:') results = { 'columns': [{ - 'name': u'\xe4' + 'name': '\xe4' }, { 'name': 'test2' }], 'rows': [{ - u'\xe4': 1, + '\xe4': 1, 'test2': 2 }] } @@ -169,7 +169,7 @@ def test_loads_results(self): } table_name = 'query_123' create_table(connection, table_name, results) - self.assertEquals( + self.assertEqual( len(list(connection.execute('SELECT * FROM query_123'))), 2) def test_loads_list_and_dict_results(self): @@ -185,7 +185,7 @@ def test_loads_list_and_dict_results(self): } table_name = 'query_123' create_table(connection, table_name, results) - self.assertEquals( + self.assertEqual( len(list(connection.execute('SELECT * FROM query_123'))), 2) @@ -209,7 +209,7 @@ def test_returns_query(self): user = self.factory.create_user() loaded = _load_query(user, query.id) 
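Most of the churn in these files is unittest's deprecated aliases being replaced by their canonical names: assertEquals -> assertEqual, assertNotEquals -> assertNotEqual, assertRaisesRegexp -> assertRaisesRegex, and the Python 2-only assertItemsEqual -> assertCountEqual. A self-contained sketch of the surviving spellings (a hypothetical test, not taken from this diff):

    import unittest

    class AliasDemo(unittest.TestCase):
        def test_canonical_names(self):
            self.assertEqual(1 + 1, 2)                   # was assertEquals
            self.assertNotEqual('a', 'b')                # was assertNotEquals
            self.assertCountEqual([2, 1, 2], [1, 2, 2])  # was assertItemsEqual on py2
            with self.assertRaisesRegex(ValueError, 'invalid literal'):
                int('nope')                              # was assertRaisesRegexp

    if __name__ == '__main__':
        unittest.main()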
- self.assertEquals(query, loaded) + self.assertEqual(query, loaded) def test_returns_query_when_user_has_view_only_access(self): ds = self.factory.create_data_source( @@ -218,27 +218,27 @@ def test_returns_query_when_user_has_view_only_access(self): user = self.factory.create_user() loaded = _load_query(user, query.id) - self.assertEquals(query, loaded) + self.assertEqual(query, loaded) class TestExtractCachedQueryIds(TestCase): def test_works_with_simple_query(self): query = "SELECT 1" - self.assertEquals([], extract_cached_query_ids(query)) + self.assertEqual([], extract_cached_query_ids(query)) def test_finds_queries_to_load(self): query = "SELECT * FROM cached_query_123" - self.assertEquals([123], extract_cached_query_ids(query)) + self.assertEqual([123], extract_cached_query_ids(query)) def test_finds_queries_in_joins(self): query = "SELECT * FROM cached_query_123 JOIN cached_query_4566" - self.assertEquals([123, 4566], extract_cached_query_ids(query)) + self.assertEqual([123, 4566], extract_cached_query_ids(query)) def test_finds_queries_with_whitespace_characters(self): query = "SELECT * FROM cached_query_123 a JOIN\tcached_query_4566 b ON a.id=b.parent_id JOIN\r\ncached_query_78 c ON b.id=c.parent_id" - self.assertEquals([123, 4566, 78], extract_cached_query_ids(query)) + self.assertEqual([123, 4566, 78], extract_cached_query_ids(query)) class TestFixColumnName(TestCase): def test_fix_column_name(self): - self.assertEquals(u'"a_b_c_d"', fix_column_name("a:b.c d")) + self.assertEqual('"a_b_c_d"', fix_column_name("a:b.c d")) diff --git a/tests/query_runner/test_utils.py b/tests/query_runner/test_utils.py index 1500fda20b..d2286702ec 100644 --- a/tests/query_runner/test_utils.py +++ b/tests/query_runner/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from unittest import TestCase from redash.query_runner import TYPE_DATETIME, TYPE_FLOAT, TYPE_INTEGER, TYPE_BOOLEAN, TYPE_STRING, guess_type @@ -6,7 +5,7 @@ class TestGuessType(TestCase): def test_handles_unicode(self): - self.assertEqual(guess_type(u'Текст'), TYPE_STRING) + self.assertEqual(guess_type('Текст'), TYPE_STRING) def test_detects_booleans(self): self.assertEqual(guess_type('true'), TYPE_BOOLEAN) diff --git a/tests/serializers/test_query_results.py b/tests/serializers/test_query_results.py index 3f9e7d0381..95b4f72724 100644 --- a/tests/serializers/test_query_results.py +++ b/tests/serializers/test_query_results.py @@ -1,6 +1,6 @@ import datetime import csv -import cStringIO +import io from tests import BaseTestCase @@ -16,9 +16,9 @@ {"datetime": None, "bool": None, "date": None}, {"datetime": 459, "bool": None, "date": 123}, {"datetime": "459", "bool": None, "date": "123"}, - ], + ], "columns": [ - {"friendly_name": "bool", "type": "boolean", "name": "bool"}, + {"friendly_name": "bool", "type": "boolean", "name": "bool"}, {"friendly_name": "date", "type": "datetime", "name": "datetime"}, {"friendly_name": "date", "type": "date", "name": "date"} ] @@ -44,7 +44,7 @@ def get_csv_content(self): def test_serializes_booleans_correctly(self): with self.app.test_request_context('/'): - parsed = csv.DictReader(cStringIO.StringIO(self.get_csv_content())) + parsed = csv.DictReader(io.StringIO(self.get_csv_content())) rows = list(parsed) self.assertEqual(rows[0]['bool'], 'true') @@ -53,7 +53,7 @@ def test_serializes_booleans_correctly(self): def test_serializes_datatime_with_correct_format(self): with self.app.test_request_context('/'): - parsed = csv.DictReader(cStringIO.StringIO(self.get_csv_content())) + parsed = 
csv.DictReader(io.StringIO(self.get_csv_content())) rows = list(parsed) self.assertEqual(rows[0]['datetime'], '26/05/19 12:39') @@ -65,7 +65,7 @@ def test_serializes_datatime_with_correct_format(self): def test_serializes_datatime_as_is_in_case_of_error(self): with self.app.test_request_context('/'): - parsed = csv.DictReader(cStringIO.StringIO(self.get_csv_content())) + parsed = csv.DictReader(io.StringIO(self.get_csv_content())) rows = list(parsed) self.assertEqual(rows[3]['datetime'], '459') diff --git a/tests/test_authentication.py b/tests/test_authentication.py index a3915d8f85..4023b7667c 100644 --- a/tests/test_authentication.py +++ b/tests/test_authentication.py @@ -142,14 +142,14 @@ def test_prefers_api_key_over_session_user_id(self): class TestCreateAndLoginUser(BaseTestCase): def test_logins_valid_user(self): - user = self.factory.create_user(email=u'test@example.com') + user = self.factory.create_user(email='test@example.com') with patch('redash.authentication.login_user') as login_user_mock: create_and_login_user(self.factory.org, user.name, user.email) login_user_mock.assert_called_once_with(user, remember=True) def test_creates_vaild_new_user(self): - email = u'test@example.com' + email = 'test@example.com' name = 'Test User' with patch('redash.authentication.login_user') as login_user_mock: @@ -160,7 +160,7 @@ def test_creates_vaild_new_user(self): self.assertEqual(user.email, email) def test_updates_user_name(self): - user = self.factory.create_user(email=u'test@example.com') + user = self.factory.create_user(email='test@example.com') with patch('redash.authentication.login_user') as login_user_mock: create_and_login_user(self.factory.org, "New Name", user.email) @@ -169,16 +169,16 @@ def test_updates_user_name(self): class TestVerifyProfile(BaseTestCase): def test_no_domain_allowed_for_org(self): - profile = dict(email=u'arik@example.com') + profile = dict(email='arik@example.com') self.assertFalse(verify_profile(self.factory.org, profile)) def test_domain_not_in_org_domains_list(self): - profile = dict(email=u'arik@example.com') + profile = dict(email='arik@example.com') self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org'] self.assertFalse(verify_profile(self.factory.org, profile)) def test_domain_in_org_domains_list(self): - profile = dict(email=u'arik@example.com') + profile = dict(email='arik@example.com') self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.com'] self.assertTrue(verify_profile(self.factory.org, profile)) @@ -186,14 +186,14 @@ def test_domain_in_org_domains_list(self): self.assertTrue(verify_profile(self.factory.org, profile)) def test_org_in_public_mode_accepts_any_domain(self): - profile = dict(email=u'arik@example.com') + profile = dict(email='arik@example.com') self.factory.org.settings[models.Organization.SETTING_IS_PUBLIC] = True self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = [] self.assertTrue(verify_profile(self.factory.org, profile)) def test_user_not_in_domain_but_account_exists(self): - profile = dict(email=u'arik@example.com') - self.factory.create_user(email=u'arik@example.com') + profile = dict(email='arik@example.com') + self.factory.create_user(email='arik@example.com') self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org'] self.assertTrue(verify_profile(self.factory.org, profile)) @@ -220,15 +220,15 @@ def test_no_next_param(self): def test_simple_path_in_next_param(self): response = 
self.post_request('/login?next=queries', data={'email': self.user.email, 'password': self.password}, org=self.factory.org) - self.assertEqual(response.location, 'http://localhost/queries') + self.assertEqual(response.location, 'http://localhost/default/queries') def test_starts_scheme_url_in_next_param(self): response = self.post_request('/login?next=https://redash.io', data={'email': self.user.email, 'password': self.password}, org=self.factory.org) - self.assertEqual(response.location, 'http://localhost/') + self.assertEqual(response.location, 'http://localhost/default/') def test_without_scheme_url_in_next_param(self): response = self.post_request('/login?next=//redash.io', data={'email': self.user.email, 'password': self.password}, org=self.factory.org) - self.assertEqual(response.location, 'http://localhost/') + self.assertEqual(response.location, 'http://localhost/default/') def test_without_scheme_with_path_url_in_next_param(self): response = self.post_request('/login?next=//localhost/queries', data={'email': self.user.email, 'password': self.password}, org=self.factory.org) @@ -279,7 +279,7 @@ def assert_correct_user_attributes(self, user, email='test@example.com', name='t self.assertEqual(user.email, email) self.assertEqual(user.name, name) self.assertEqual(user.org, org or self.factory.org) - self.assertItemsEqual(user.group_ids, groups) + self.assertCountEqual(user.group_ids, groups) def get_test_user(self, email='test@example.com', org=None): """Helper to fetch an user from the database.""" diff --git a/tests/test_cli.py b/tests/test_cli.py index 25ab3ef5c9..25e7be08ee 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -12,7 +12,7 @@ class DataSourceCommandTests(BaseTestCase): def test_interactive_new(self): runner = CliRunner() - pg_i = query_runners.keys().index('pg') + 1 + pg_i = list(query_runners.keys()).index('pg') + 1 result = runner.invoke( manager, ['ds', 'new'], @@ -221,7 +221,7 @@ def test_list(self): self.factory.create_group(name='bgroup', permissions=['list_dashboards']) self.factory.create_user(name='Fred Foobar', - email=u'foobar@example.com', + email='foobar@example.com', org=self.factory.org, group_ids=[self.factory.default_group.id]) @@ -331,7 +331,7 @@ def test_create_basic(self): input="password1\npassword1\n") self.assertFalse(result.exception) self.assertEqual(result.exit_code, 0) - u = User.query.filter(User.email == u"foobar@example.com").first() + u = User.query.filter(User.email == "foobar@example.com").first() self.assertEqual(u.name, "Fred Foobar") self.assertTrue(u.verify_password('password1')) self.assertEqual(u.group_ids, [u.org.default_group.id]) @@ -343,7 +343,7 @@ def test_create_admin(self): '--password', 'password1', '--admin']) self.assertFalse(result.exception) self.assertEqual(result.exit_code, 0) - u = User.query.filter(User.email == u"foobar@example.com").first() + u = User.query.filter(User.email == "foobar@example.com").first() self.assertEqual(u.name, "Fred Foobar") self.assertTrue(u.verify_password('password1')) self.assertEqual(u.group_ids, [u.org.default_group.id, @@ -355,67 +355,67 @@ def test_create_googleauth(self): manager, ['users', 'create', 'foobar@example.com', 'Fred Foobar', '--google']) self.assertFalse(result.exception) self.assertEqual(result.exit_code, 0) - u = User.query.filter(User.email == u"foobar@example.com").first() + u = User.query.filter(User.email == "foobar@example.com").first() self.assertEqual(u.name, "Fred Foobar") self.assertIsNone(u.password_hash) self.assertEqual(u.group_ids, 
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 25ab3ef5c9..25e7be08ee 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -12,7 +12,7 @@ class DataSourceCommandTests(BaseTestCase):
     def test_interactive_new(self):
         runner = CliRunner()
-        pg_i = query_runners.keys().index('pg') + 1
+        pg_i = list(query_runners.keys()).index('pg') + 1
         result = runner.invoke(
             manager, ['ds', 'new'],
@@ -221,7 +221,7 @@ def test_list(self):
         self.factory.create_group(name='bgroup', permissions=['list_dashboards'])
 
         self.factory.create_user(name='Fred Foobar',
-                                 email=u'foobar@example.com',
+                                 email='foobar@example.com',
                                  org=self.factory.org,
                                  group_ids=[self.factory.default_group.id])
@@ -331,7 +331,7 @@ def test_create_basic(self):
             input="password1\npassword1\n")
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
-        u = User.query.filter(User.email == u"foobar@example.com").first()
+        u = User.query.filter(User.email == "foobar@example.com").first()
         self.assertEqual(u.name, "Fred Foobar")
         self.assertTrue(u.verify_password('password1'))
         self.assertEqual(u.group_ids, [u.org.default_group.id])
@@ -343,7 +343,7 @@ def test_create_admin(self):
             '--password', 'password1', '--admin'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
-        u = User.query.filter(User.email == u"foobar@example.com").first()
+        u = User.query.filter(User.email == "foobar@example.com").first()
         self.assertEqual(u.name, "Fred Foobar")
         self.assertTrue(u.verify_password('password1'))
         self.assertEqual(u.group_ids, [u.org.default_group.id,
@@ -355,67 +355,67 @@ def test_create_googleauth(self):
             manager, ['users', 'create', 'foobar@example.com', 'Fred Foobar', '--google'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
-        u = User.query.filter(User.email == u"foobar@example.com").first()
+        u = User.query.filter(User.email == "foobar@example.com").first()
         self.assertEqual(u.name, "Fred Foobar")
         self.assertIsNone(u.password_hash)
         self.assertEqual(u.group_ids, [u.org.default_group.id])
 
     def test_create_bad(self):
-        self.factory.create_user(email=u'foobar@example.com')
+        self.factory.create_user(email='foobar@example.com')
         runner = CliRunner()
         result = runner.invoke(
-            manager, ['users', 'create', u'foobar@example.com', 'Fred Foobar'],
+            manager, ['users', 'create', 'foobar@example.com', 'Fred Foobar'],
             input="password1\npassword1\n")
         self.assertTrue(result.exception)
         self.assertEqual(result.exit_code, 1)
         self.assertIn('Failed', result.output)
 
     def test_delete(self):
-        self.factory.create_user(email=u'foobar@example.com')
+        self.factory.create_user(email='foobar@example.com')
         ucount = User.query.count()
         runner = CliRunner()
         result = runner.invoke(manager, ['users', 'delete', 'foobar@example.com'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
         self.assertEqual(User.query.filter(User.email ==
-                                           u"foobar@example.com").count(), 0)
+                                           "foobar@example.com").count(), 0)
         self.assertEqual(User.query.count(), ucount - 1)
 
     def test_delete_bad(self):
         ucount = User.query.count()
         runner = CliRunner()
-        result = runner.invoke(manager, ['users', 'delete', u'foobar@example.com'])
+        result = runner.invoke(manager, ['users', 'delete', 'foobar@example.com'])
         self.assertIn('Deleted 0 users', result.output)
         self.assertEqual(User.query.count(), ucount)
 
     def test_password(self):
-        self.factory.create_user(email=u'foobar@example.com')
+        self.factory.create_user(email='foobar@example.com')
         runner = CliRunner()
-        result = runner.invoke(manager, ['users', 'password', u'foobar@example.com', 'xyzzy'])
+        result = runner.invoke(manager, ['users', 'password', 'foobar@example.com', 'xyzzy'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
-        u = User.query.filter(User.email == u"foobar@example.com").first()
+        u = User.query.filter(User.email == "foobar@example.com").first()
         self.assertTrue(u.verify_password('xyzzy'))
 
     def test_password_bad(self):
         runner = CliRunner()
-        result = runner.invoke(manager, ['users', 'password', u'foobar@example.com', 'xyzzy'])
+        result = runner.invoke(manager, ['users', 'password', 'foobar@example.com', 'xyzzy'])
         self.assertTrue(result.exception)
         self.assertEqual(result.exit_code, 1)
         self.assertIn('not found', result.output)
 
     def test_password_bad_org(self):
         runner = CliRunner()
-        result = runner.invoke(manager, ['users', 'password', u'foobar@example.com', 'xyzzy', '--org', 'default'])
+        result = runner.invoke(manager, ['users', 'password', 'foobar@example.com', 'xyzzy', '--org', 'default'])
         self.assertTrue(result.exception)
         self.assertEqual(result.exit_code, 1)
         self.assertIn('not found', result.output)
 
     def test_invite(self):
-        admin = self.factory.create_user(email=u'redash-admin@example.com')
+        admin = self.factory.create_user(email='redash-admin@example.com')
         runner = CliRunner()
         with mock.patch('redash.cli.users.invite_user') as iu:
-            result = runner.invoke(manager, ['users', 'invite', u'foobar@example.com', 'Fred Foobar', u'redash-admin@example.com'])
+            result = runner.invoke(manager, ['users', 'invite', 'foobar@example.com', 'Fred Foobar', 'redash-admin@example.com'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
         self.assertTrue(iu.called)
@@ -427,15 +427,15 @@ def test_invite(self):
 
     def test_list(self):
         self.factory.create_user(name='Fred Foobar',
-                                 email=u'foobar@example.com',
+                                 email='foobar@example.com',
                                  org=self.factory.org)
 
         self.factory.create_user(name='William Foobar',
-                                 email=u'william@example.com',
+                                 email='william@example.com',
                                  org=self.factory.org)
 
         self.factory.create_user(name='Andrew Foobar',
-                                 email=u'andrew@example.com',
+                                 email='andrew@example.com',
                                  org=self.factory.org)
 
         runner = CliRunner()
@@ -469,11 +469,11 @@ def test_list(self):
     def test_grant_admin(self):
         u = self.factory.create_user(name='Fred Foobar',
-                                     email=u'foobar@example.com',
+                                     email='foobar@example.com',
                                      org=self.factory.org,
                                      group_ids=[self.factory.default_group.id])
         runner = CliRunner()
-        result = runner.invoke(manager, ['users', 'grant_admin', u'foobar@example.com'])
+        result = runner.invoke(manager, ['users', 'grant_admin', 'foobar@example.com'])
         self.assertFalse(result.exception)
         self.assertEqual(result.exit_code, 0)
         db.session.add(u)
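The one behavioral fix in this file is pg_i: in Python 3, dict.keys()
returns a view object with no index() method, so the port materializes it
with list() first. A short sketch with a hypothetical runner registry:

    # dict.keys() is a view in Python 3; list() restores indexing.
    query_runners = {'pg': 'PostgreSQL', 'mysql': 'MySQL'}  # hypothetical

    # Python 2: query_runners.keys().index('pg') -- AttributeError on Python 3.
    pg_i = list(query_runners.keys()).index('pg') + 1
    print(pg_i)  # 1; the CLI prompt numbers choices from 1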
diff --git a/tests/test_handlers.py b/tests/test_handlers.py
index e9f835becc..7d8e872617 100644
--- a/tests/test_handlers.py
+++ b/tests/test_handlers.py
@@ -10,12 +10,12 @@ class AuthenticationTestMixin(object):
     def test_returns_404_when_not_unauthenticated(self):
         for path in self.paths:
             rv = self.client.get(path)
-            self.assertEquals(404, rv.status_code)
+            self.assertEqual(404, rv.status_code)
 
     def test_returns_content_when_authenticated(self):
         for path in self.paths:
             rv = self.make_request('get', path, is_json=False)
-            self.assertEquals(200, rv.status_code)
+            self.assertEqual(200, rv.status_code)
 
 
 class TestAuthentication(BaseTestCase):
@@ -25,26 +25,26 @@ def test_responds_with_success_for_signed_in_user(self):
                 sess['user_id'] = self.factory.user.get_id()
 
             rv = self.client.get("/default/")
-            self.assertEquals(200, rv.status_code)
+            self.assertEqual(200, rv.status_code)
 
     def test_redirects_for_nonsigned_in_user(self):
         rv = self.client.get("/default/")
-        self.assertEquals(302, rv.status_code)
-        
+        self.assertEqual(302, rv.status_code)
+
     def test_redirects_for_invalid_session_identifier(self):
         with self.client as c:
             with c.session_transaction() as sess:
                 sess['user_id'] = 100
 
             rv = self.client.get("/default/")
-            self.assertEquals(302, rv.status_code)
+            self.assertEqual(302, rv.status_code)
 
 
 class PingTest(BaseTestCase):
     def test_ping(self):
         rv = self.client.get('/ping')
-        self.assertEquals(200, rv.status_code)
-        self.assertEquals('PONG.', rv.data)
+        self.assertEqual(200, rv.status_code)
+        self.assertEqual(b'PONG.', rv.data)
 
 
 class IndexTest(BaseTestCase):
@@ -55,12 +55,12 @@ def setUp(self):
     def test_redirect_to_login_when_not_authenticated(self):
         for path in self.paths:
             rv = self.client.get(path)
-            self.assertEquals(302, rv.status_code)
+            self.assertEqual(302, rv.status_code)
 
     def test_returns_content_when_authenticated(self):
         for path in self.paths:
             rv = self.make_request('get', path, org=False, is_json=False)
-            self.assertEquals(200, rv.status_code)
+            self.assertEqual(200, rv.status_code)
 
 
 class StatusTest(BaseTestCase):
@@ -92,7 +92,7 @@ def setUp(self):
     def test_get_login_form(self):
         rv = self.client.get('/default/login')
-        self.assertEquals(rv.status_code, 200)
+        self.assertEqual(rv.status_code, 200)
 
     def test_get_login_form_remote_auth(self):
         """Make sure the remote auth link can be rendered correctly on the
@@ -103,9 +103,9 @@ def test_get_login_form_remote_auth(self):
             settings.REMOTE_USER_LOGIN_ENABLED = True
             settings.LDAP_LOGIN_ENABLED = True
             rv = self.client.get('/default/login')
-            self.assertEquals(rv.status_code, 200)
-            self.assertIn('/{}/remote_user/login'.format(self.factory.org.slug), rv.data)
-            self.assertIn('/{}/ldap/login'.format(self.factory.org.slug), rv.data)
+            self.assertEqual(rv.status_code, 200)
+            self.assertIn('/{}/remote_user/login'.format(self.factory.org.slug), rv.data.decode())
+            self.assertIn('/{}/ldap/login'.format(self.factory.org.slug), rv.data.decode())
         finally:
             settings.REMOTE_USER_LOGIN_ENABLED = old_remote_user_enabled
             settings.LDAP_LOGIN_ENABLED = old_ldap_login_enabled
@@ -113,7 +113,7 @@ def test_get_login_form_remote_auth(self):
     def test_submit_non_existing_user(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': 'arik', 'password': 'password'})
-            self.assertEquals(rv.status_code, 200)
+            self.assertEqual(rv.status_code, 200)
             self.assertFalse(login_user_mock.called)
 
     def test_submit_correct_user_and_password(self):
@@ -125,7 +125,7 @@ def test_submit_correct_user_and_password(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': user.email, 'password': 'password'})
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             login_user_mock.assert_called_with(user, remember=False)
 
     def test_submit_case_insensitive_user_and_password(self):
@@ -137,7 +137,7 @@ def test_submit_case_insensitive_user_and_password(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': user.email.upper(), 'password': 'password'})
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             login_user_mock.assert_called_with(user, remember=False)
 
     def test_submit_correct_user_and_password_and_remember_me(self):
@@ -149,7 +149,7 @@ def test_submit_correct_user_and_password_and_remember_me(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': user.email, 'password': 'password', 'remember': True})
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             login_user_mock.assert_called_with(user, remember=True)
 
     def test_submit_correct_user_and_password_with_next(self):
@@ -162,14 +162,14 @@ def test_submit_correct_user_and_password_with_next(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login?next=/test', data={'email': user.email, 'password': 'password'})
-            self.assertEquals(rv.status_code, 302)
-            self.assertEquals(rv.location, 'http://localhost/test')
+            self.assertEqual(rv.status_code, 302)
+            self.assertEqual(rv.location, 'http://localhost/test')
             login_user_mock.assert_called_with(user, remember=False)
 
     def test_submit_incorrect_user(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': 'non-existing', 'password': 'password'})
-            self.assertEquals(rv.status_code, 200)
+            self.assertEqual(rv.status_code, 200)
             self.assertFalse(login_user_mock.called)
 
     def test_submit_incorrect_password(self):
@@ -182,7 +182,7 @@ def test_submit_incorrect_password(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={
                 'email': user.email, 'password': 'badbadpassword'})
-            self.assertEquals(rv.status_code, 200)
+            self.assertEqual(rv.status_code, 200)
             self.assertFalse(login_user_mock.called)
 
     def test_submit_empty_password(self):
@@ -190,13 +190,13 @@ def test_submit_empty_password(self):
         with patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.post('/default/login', data={'email': user.email, 'password': ''})
-            self.assertEquals(rv.status_code, 200)
+            self.assertEqual(rv.status_code, 200)
             self.assertFalse(login_user_mock.called)
 
     def test_user_already_loggedin(self):
         with authenticated_user(self.client), patch('redash.handlers.authentication.login_user') as login_user_mock:
             rv = self.client.get('/default/login')
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             self.assertFalse(login_user_mock.called)
@@ -204,7 +204,7 @@ class TestLogout(BaseTestCase):
     def test_logout_when_not_loggedin(self):
         with self.app.test_client() as c:
             rv = c.get('/default/logout')
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             self.assertFalse(current_user.is_authenticated)
 
     def test_logout_when_loggedin(self):
@@ -212,7 +212,7 @@ def test_logout_when_loggedin(self):
             rv = c.get('/default/')
             self.assertTrue(current_user.is_authenticated)
             rv = c.get('/default/logout')
-            self.assertEquals(rv.status_code, 302)
+            self.assertEqual(rv.status_code, 302)
             self.assertFalse(current_user.is_authenticated)
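test_handlers.py mostly tracks the bytes/str split: under Python 3,
Werkzeug response bodies (rv.data) are bytes, so the port compares against
b'PONG.' and calls .decode() before substring checks. A minimal sketch
against a throwaway Flask app (hypothetical route, not Redash's handlers):

    from flask import Flask

    app = Flask(__name__)

    @app.route('/ping')
    def ping():
        return 'PONG.'

    with app.test_client() as client:
        rv = client.get('/ping')
        assert rv.status_code == 200
        assert rv.data == b'PONG.'         # rv.data is bytes
        assert 'PONG' in rv.data.decode()  # decode() before str operations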
diff --git a/tests/test_models.py b/tests/test_models.py
index 8ca46f0819..533fd29f46 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -1,4 +1,3 @@
-#encoding: utf8
 import calendar
 import datetime
 from unittest import TestCase
@@ -16,16 +15,16 @@ class DashboardTest(BaseTestCase):
     def test_appends_suffix_to_slug_when_duplicate(self):
         d1 = self.factory.create_dashboard()
         db.session.flush()
-        self.assertEquals(d1.slug, 'test')
+        self.assertEqual(d1.slug, 'test')
 
         d2 = self.factory.create_dashboard(user=d1.user)
         db.session.flush()
-        self.assertNotEquals(d1.slug, d2.slug)
+        self.assertNotEqual(d1.slug, d2.slug)
 
         d3 = self.factory.create_dashboard(user=d1.user)
         db.session.flush()
-        self.assertNotEquals(d1.slug, d3.slug)
-        self.assertNotEquals(d2.slug, d3.slug)
+        self.assertNotEqual(d1.slug, d3.slug)
+        self.assertNotEqual(d2.slug, d3.slug)
 
 
 class ShouldScheduleNextTest(TestCase):
@@ -158,7 +157,7 @@ def test_outdated_queries_works_with_ttl_based_schedule(self):
         self.assertIn(query, queries)
 
     def test_outdated_queries_works_scheduled_queries_tracker(self):
-        two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
+        two_hours_ago = utcnow() - datetime.timedelta(hours=2)
         query = self.factory.create_query(schedule={'interval':'3600', 'time': None, 'until':None, 'day_of_week':None})
         query_result = self.factory.create_query_result(query=query, retrieved_at=two_hours_ago)
         query.latest_query_data = query_result
@@ -221,8 +220,10 @@ def test_enqueues_query_with_correct_data_source(self):
         query.latest_query_data = query_result
         query2.latest_query_data = query_result
 
-        self.assertEqual(list(models.Query.outdated_queries()),
-                         [query2, query])
+        outdated_queries = models.Query.outdated_queries()
+        self.assertEqual(len(outdated_queries), 2)
+        self.assertIn(query, outdated_queries)
+        self.assertIn(query2, outdated_queries)
 
     def test_enqueues_only_for_relevant_data_source(self):
         """
@@ -294,7 +295,7 @@ def test_archive_query_sets_flag(self):
         db.session.flush()
         query.archive()
 
-        self.assertEquals(query.is_archived, True)
+        self.assertEqual(query.is_archived, True)
 
     def test_archived_query_doesnt_return_in_all(self):
         query = self.factory.create_query(schedule={'interval':'1', 'until':None, 'time': None, 'day_of_week':None})
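Two substantive changes ride along with the assertEquals renames here: the
scheduler test derives "two hours ago" from utcnow() (timezone-aware UTC,
matching the retrieved_at timestamps it is compared against) instead of the
naive datetime.datetime.now(), and the outdated-queries assertion no longer
depends on result order. A sketch of the naive/aware pitfall, using the
standard library's timezone.utc in place of redash.utils.utcnow:

    import datetime

    aware_now = datetime.datetime.now(datetime.timezone.utc)  # aware, like utcnow()
    naive_now = datetime.datetime.now()                       # naive local time

    two_hours_ago = aware_now - datetime.timedelta(hours=2)   # stays aware

    try:
        naive_now < two_hours_ago
    except TypeError as err:
        print(err)  # can't compare offset-naive and offset-aware datetimes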
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 493d3fcaef..479f26fc7c 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -4,10 +4,6 @@
 from redash.utils import (build_url, collect_parameters_from_request,
                           filter_none, json_dumps, generate_token)
 
-try:
-    buffer
-except NameError:
-    buffer = bytes
 
 DummyRequest = namedtuple('DummyRequest', ['host', 'scheme'])
 
@@ -49,10 +45,10 @@ def test_skips_nones(self):
 
 class TestJsonDumps(TestCase):
     def test_handles_binary(self):
-        self.assertEqual(json_dumps(buffer("test")), '"74657374"')
+        self.assertEqual(json_dumps(memoryview(b"test")), '"74657374"')
 
 
 class TestGenerateToken(TestCase):
     def test_format(self):
         token = generate_token(40)
-        self.assertRegexpMatches(token, r"[a-zA-Z0-9]{40}")
+        self.assertRegex(token, r"[a-zA-Z0-9]{40}")
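With the buffer compatibility shim gone, memoryview is the Python 3
stand-in for a read-only binary view, which json_dumps serializes as hex;
unittest's assertRegexpMatches likewise became assertRegex. A
standard-library-only sketch (binascii.hexlify stands in for the hex
encoding json_dumps applies):

    import binascii
    import re

    view = memoryview(b"test")
    assert binascii.hexlify(view).decode() == "74657374"

    # assertRegexpMatches -> assertRegex; re.fullmatch shows the same check
    # outside a TestCase.
    token = "a" * 40
    assert re.fullmatch(r"[a-zA-Z0-9]{40}", token)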