From 5716783f9e6d7509e75df21c99e20fd0018b807b Mon Sep 17 00:00:00 2001 From: Sergio Oliveira Date: Sun, 4 Oct 2020 15:21:38 -0300 Subject: [PATCH 1/4] Generic DB connection and management --- .gitignore | 5 + Dockerfile | 25 +++- MANIFEST.in | 2 + README.md | 120 +++++++++++++++++- docker-compose.yml | 7 +- kanbandash/__init__.py | 0 kanbandash/alembic.ini | 86 +++++++++++++ kanbandash/alembic/README | 1 + kanbandash/alembic/__init__.py | 0 kanbandash/alembic/env.py | 81 ++++++++++++ kanbandash/alembic/script.py.mako | 24 ++++ .../alembic/versions/071e65ae3528_initial.py | 28 ++++ kanbandash/alembic/versions/__init__.py | 0 kanbandash/cli.py | 111 ++++++++++++++++ .../kanban-dashboards.json | 0 models.py => kanbandash/models.py | 4 +- .../populate_with_test_data.py | 4 +- kanbandash/settings.py | 5 + pyproject.toml | 3 + requirements.txt | 5 +- run.py | 43 ------- scripts/docker-run.sh | 3 - .../{run.sh => manage-dashboard-schema.sh} | 2 +- scripts/metabase-import-export.py | 14 +- settings.py | 5 - setup.cfg | 19 +++ setup.py | 3 + 27 files changed, 518 insertions(+), 82 deletions(-) create mode 100644 MANIFEST.in create mode 100644 kanbandash/__init__.py create mode 100644 kanbandash/alembic.ini create mode 100644 kanbandash/alembic/README create mode 100644 kanbandash/alembic/__init__.py create mode 100644 kanbandash/alembic/env.py create mode 100644 kanbandash/alembic/script.py.mako create mode 100644 kanbandash/alembic/versions/071e65ae3528_initial.py create mode 100644 kanbandash/alembic/versions/__init__.py create mode 100644 kanbandash/cli.py rename kanban-dashboards.json => kanbandash/kanban-dashboards.json (100%) rename models.py => kanbandash/models.py (99%) rename populate_with_test_data.py => kanbandash/populate_with_test_data.py (98%) create mode 100644 kanbandash/settings.py create mode 100644 pyproject.toml delete mode 100644 run.py delete mode 100755 scripts/docker-run.sh rename scripts/{run.sh => manage-dashboard-schema.sh} (53%) delete mode 100644 settings.py create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/.gitignore b/.gitignore index a9150bf..6b15c87 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,8 @@ __pycache__ metabase-db/ .DS_Store + +build/ +dist/ + +*.egg-info/ diff --git a/Dockerfile b/Dockerfile index 3faae3f..9f9015d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,21 @@ -FROM python:3-stretch -RUN mkdir /code +# builder image +FROM python:3-alpine as builder +COPY . /code/ + WORKDIR /code -ADD requirements.txt /code/ -RUN pip install -r requirements.txt -ADD . /code/ + +RUN python setup.py bdist_wheel --dist-dir=/tmp/dist/ + +# Final image +FROM python:3-alpine + +ENV KANBANDASH_DATABASE_URL= + +COPY --from=builder /tmp/dist/kanbandash*.whl /tmp/ + +RUN \ + apk add --no-cache postgresql-libs && \ + apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev && \ + pip install /tmp/kanbandash*.whl --no-cache-dir && \ + apk --purge del .build-deps && \ + rm -fR /tmp/kanbandash*.whl diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..5a866bb --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include kanbandash/alembic.ini +include kanbandash/kanban-dashboards.json diff --git a/README.md b/README.md index 37afa9e..e00ea01 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ -metabase-kanban-dashboard -========================= +# metabase-kanban-dashboard The goal of this project is to provide an open-source Kanban metrics dashboard for Metabase. 
@@ -7,17 +6,113 @@ This still in early stage and that's how the dashboard currently looks like: ![Screen Shot 2020-09-07 at 22 19 59-fullpage](https://user-images.githubusercontent.com/33388/92423867-fbac2380-f158-11ea-9e07-7b5c5d83a9db.png) -Testing with the container and the test data: ---------------------------------------------- +## Getting started + +Before getting started you will need to have a PostgreSQL server and a Metabase instance configured and running. + +The tools included in this repository will create the questions and the dashboards in your Metabase instance as well as the database schema you will need. + +It's not in the scope of this project to connect to any project management tool to extract your information (at least not for now). + +Long story short, here is what you will need in order to get your Kanban dashboard up and running: + +1. Create the tables required to store your Kanban data +1. Import the Metabase questions and dashboards +1. Process the data from your project management tool and insert it into the database + +In the next sections we'll guide you through each of these steps. + +If you want to play around with the test docker environment before getting your hands dirty, take a look at the section "[Testing with the container and the test data](#testing-with-the-container-and-the-test-data)". + + +### Installing + +You can install the library by cloning this repository and running `pip install .` inside the repository directory. + +Another option is to just download our docker container using `docker pull cravefood/kanban-dash`. + +In either case you will always have to set the env var `KANBANDASH_DATABASE_URL`. This is how the tool knows which database to connect to. The variable should look like this: `KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database>`. + + +### Creating the models + +To create the models you will need to run the command: + +``` +KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> kanban-dash models --create +``` + +Or using the docker container: + +``` +docker run -e KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> cravefood/kanban-dash kanban-dash models --create +``` + + +### Creating the questions and dashboards in Metabase + +Before creating the data in Metabase you will need to configure your Metabase instance to access your Kanban database. You can do that by accessing Settings -> Admin -> Databases (top menu) -> Add database. + +With your database configured you will need to create a new collection: Browse all items -> New collection. +After creating the collection you will have to access it in order to get the collection id (available in the collection URL). + +Now we are ready to run the collection import script: + +``` +./scripts/metabase-import-export.py \ + --username= \ + import \ + --collection-id= \ + --import-file=kanbandash/kanban-dashboards.json +``` + +After running the script you should be able to access the collection and see the imported reports. Select "Kanban" in the dashboards tab to see the dashboard without any data. 
+ +If you want to insert some test data you can do that by running: + +``` +KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> kanban-dash generate-test-data +``` + +Or using the docker container: + +``` +docker run -e KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> cravefood/kanban-dash kanban-dash generate-test-data +``` + + +Once you are done playing with the test data you can clean it up using the command: + +``` +KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> kanban-dash models --reset +``` + +Or using the docker container: + +``` +docker run -e KANBANDASH_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> cravefood/kanban-dash kanban-dash models --reset +``` + +### Inserting real data + +TODO + + +## Testing with the container and the test data * Start the containers ``` $ docker-compose up ``` -* Run the script to create a populate the test database: +* Run the scripts to create the database schema: +``` +docker-compose run kanban-dash kanban-dash models --create +``` + +* Run the script to populate the database with test data: ``` -$ ./scripts/run.sh populate_with_test_data.py +docker-compose run kanban-dash kanban-dash generate-test-data ``` * Access the local Metabase instance on http://localhost:3000, create a user and password and connect to the testing database (db `kanban_metrics`, hostname `postgres`, username `postgres`, no password). @@ -30,5 +125,16 @@ $ ./scripts/run.sh populate_with_test_data.py --username= \ import \ --collection-id= \ - --import-file=kanban-dashboards.json + --import-file=kanbandash/kanban-dashboards.json +``` + + +## Developing / Contributing + +### Creating a schema migration + +The database migrations are managed with Alembic. If you change the models.py file you will need to create a new database migration to reflect those changes. Using the docker-compose environment you can do that with the following command: + +``` +docker-compose run kanban-dash alembic revision --autogenerate -m "" +``` diff --git a/docker-compose.yml b/docker-compose.yml index 369db62..5ee266c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,11 +19,12 @@ services: - MGID=${GID} - MB_DB_FILE=/metabase.db - kanban-reports: + kanban-dash: build: . - # repo it's just a few scripts. This is a dirty hack to keep the container running - command: bash -c "sleep infinity" depends_on: - postgres volumes: - .:/code + working_dir: /code/kanbandash + environment: + - KANBANDASH_DATABASE_URL=postgresql://postgres@postgres/kanban_metrics diff --git a/kanbandash/__init__.py b/kanbandash/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kanbandash/alembic.ini b/kanbandash/alembic.ini new file mode 100644 index 0000000..df2513a --- /dev/null +++ b/kanbandash/alembic.ini @@ -0,0 +1,86 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. 
+# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# This will be overwritten in env.py +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks=black +# black.type=console_scripts +# black.entrypoint=black +# black.options=-l 79 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/kanbandash/alembic/README b/kanbandash/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/kanbandash/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/kanbandash/alembic/__init__.py b/kanbandash/alembic/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kanbandash/alembic/env.py b/kanbandash/alembic/env.py new file mode 100644 index 0000000..89968e9 --- /dev/null +++ b/kanbandash/alembic/env.py @@ -0,0 +1,81 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from kanbandash import models +from kanbandash import settings + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = models.Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +# Replacing some settings from alembic.ini +config.set_main_option("sqlalchemy.url", settings.POSTGRES_DATABASE_URL) + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. 
By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/kanbandash/alembic/script.py.mako b/kanbandash/alembic/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/kanbandash/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/kanbandash/alembic/versions/071e65ae3528_initial.py b/kanbandash/alembic/versions/071e65ae3528_initial.py new file mode 100644 index 0000000..d562ff6 --- /dev/null +++ b/kanbandash/alembic/versions/071e65ae3528_initial.py @@ -0,0 +1,28 @@ +"""Initial schema migration + +Revision ID: 071e65ae3528 +Revises: +Create Date: 2020-10-04 17:42:09.776281 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "071e65ae3528" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/kanbandash/alembic/versions/__init__.py b/kanbandash/alembic/versions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kanbandash/cli.py b/kanbandash/cli.py new file mode 100644 index 0000000..d908d82 --- /dev/null +++ b/kanbandash/cli.py @@ -0,0 +1,111 @@ +import argparse +import inspect +import os +import pathlib + +from alembic import command +from alembic.config import Config + +from . 
import models + +from .populate_with_test_data import generate_data + +SCRIPT_DIR = pathlib.Path(__file__).parent.absolute() + +os.chdir(SCRIPT_DIR) + +ALEMBIC_CFG = Config(pathlib.Path(SCRIPT_DIR, "alembic.ini")) + + +def is_base_subclass(obj): + if obj is not models.Base and inspect.isclass(obj) and issubclass(obj, models.Base): + return True + + +def get_tables(): + obj_names = dir(models) + + objs = [ + getattr(models, obj_name) + for obj_name in obj_names + if not obj_name.startswith("__") + ] + objs = [obj.__table__ for obj in objs if is_base_subclass(obj)] + + return objs + + +def drop_tables(): + tables = get_tables() + models.Base.metadata.drop_all(models.SQLALCHEMY_ENGINE, tables=tables) + + +def create_tables(): + tables = get_tables() + models.Base.metadata.create_all(models.SQLALCHEMY_ENGINE, tables=tables) + migrate_schema() + + +def recreate_tables(): + drop_tables() + create_tables() + + +def migrate_schema(): + command.upgrade(ALEMBIC_CFG, "head") + + +def manage_models(args): + if args.create: + create_tables() + + elif args.reset: + recreate_tables() + + elif args.upgrade: + migrate_schema() + + +def get_argparser(): + parser = argparse.ArgumentParser( + description="Metabase Kanban Dashboard manager", + ) + subparsers = parser.add_subparsers() + + # models subparsers + models = subparsers.add_parser("models") + models_group = models.add_mutually_exclusive_group(required=True) + models_group.add_argument( + "--create", action="store_true", help=("Create the database schema.") + ) + models_group.add_argument( + "--upgrade", + action="store_true", + help=("Update the database schema to the latest version."), + ) + models_group.add_argument( + "--reset", + action="store_true", + help="Destroy and recreate all tables. Note that ALL DATA WILL BE ERASED.", + ) + models.set_defaults(func=manage_models) + + # test-data subparser + test_data = subparsers.add_parser("generate-test-data") + test_data.set_defaults(func=generate_data) + + return parser + + +def main(): + parser = get_argparser() + args = parser.parse_args() + + if hasattr(args, "func"): + args.func(args) + else: + parser.print_usage() + + +if __name__ == "__main__": + main() diff --git a/kanban-dashboards.json b/kanbandash/kanban-dashboards.json similarity index 100% rename from kanban-dashboards.json rename to kanbandash/kanban-dashboards.json diff --git a/models.py b/kanbandash/models.py similarity index 99% rename from models.py rename to kanbandash/models.py index ff25c4b..97beb78 100644 --- a/models.py +++ b/kanbandash/models.py @@ -1,4 +1,3 @@ - from sqlalchemy import ( Boolean, Column, @@ -14,7 +13,7 @@ from sqlalchemy.orm import relationship from sqlalchemy.orm import sessionmaker -import settings +from . 
import settings # SQLAlchemy SQLALCHEMY_ENGINE = create_engine(settings.POSTGRES_DATABASE_URL, echo=False) @@ -23,7 +22,6 @@ Session = sessionmaker(bind=SQLALCHEMY_ENGINE) - class KanbanClassOfService(Base): __tablename__ = "kanban_class_of_service" diff --git a/populate_with_test_data.py b/kanbandash/populate_with_test_data.py similarity index 98% rename from populate_with_test_data.py rename to kanbandash/populate_with_test_data.py index 89d9a76..a883195 100755 --- a/populate_with_test_data.py +++ b/kanbandash/populate_with_test_data.py @@ -9,7 +9,7 @@ from collections import Counter from itertools import chain -from models import ( +from .models import ( KanbanBoard, KanbanCard, KanbanCardTime, @@ -164,7 +164,7 @@ def generate_kanban_cards(n, m): factories_session.commit() -def generate_data(): +def generate_data(args=None): generate_class_of_services() generate_boards(4) generate_kanban_days() diff --git a/kanbandash/settings.py b/kanbandash/settings.py new file mode 100644 index 0000000..52553e8 --- /dev/null +++ b/kanbandash/settings.py @@ -0,0 +1,5 @@ +import os + +POSTGRES_DATABASE_URL = os.getenv( + "KANBANDASH_DATABASE_URL", "postgresql://postgres@postgres/kanban_metrics" +) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..9787c3b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt index 7f576c9..49bcc74 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,2 @@ -psycopg2 -psycopg2-binary -SQLAlchemy +-e . ipdb -factory-boy diff --git a/run.py b/run.py deleted file mode 100644 index f2fa921..0000000 --- a/run.py +++ /dev/null @@ -1,43 +0,0 @@ -import inspect - -import models - -from populate_with_test_data import generate_data - - -def is_base_subclass(obj): - if obj is not models.Base and inspect.isclass(obj) and issubclass(obj, models.Base): - return True - - -def get_tables(): - obj_names = dir(models) - - objs = [ - getattr(models, obj_name) - for obj_name in obj_names - if not obj_name.startswith("__") - ] - objs = [obj.__table__ for obj in objs if is_base_subclass(obj)] - - return objs - - -def drop_tables(): - tables = get_tables() - models.Base.metadata.drop_all(models.SQLALCHEMY_ENGINE, tables=tables) - - -def create_tables(): - tables = get_tables() - models.Base.metadata.create_all(models.SQLALCHEMY_ENGINE, tables=tables) - - -def recreate_table(): - drop_tables() - create_tables() - - -if __name__ == "__main__": - recreate_table() - generate_data() diff --git a/scripts/docker-run.sh b/scripts/docker-run.sh deleted file mode 100755 index 9ed084e..0000000 --- a/scripts/docker-run.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker-compose exec kanban-reports /bin/bash -c "$*" diff --git a/scripts/run.sh b/scripts/manage-dashboard-schema.sh similarity index 53% rename from scripts/run.sh rename to scripts/manage-dashboard-schema.sh index 1d78c96..e6d6e77 100755 --- a/scripts/run.sh +++ b/scripts/manage-dashboard-schema.sh @@ -2,4 +2,4 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" -${DIR}/docker-run.sh python3 run.py $@ +${DIR}/docker-run.sh python3 kanbandash/manage-dashboard-schema.py $@ diff --git a/scripts/metabase-import-export.py b/scripts/metabase-import-export.py index 2318515..588d682 100755 --- a/scripts/metabase-import-export.py +++ b/scripts/metabase-import-export.py @@ -3,7 +3,6 @@ import argparse import getpass import json -import os import requests 
import sys from copy import deepcopy @@ -20,9 +19,12 @@ def call_api(method, uri, json=None, params=None): - url = METABASE_CONFIG['url'] + uri + url = METABASE_CONFIG["url"] + uri response = SESSION.request( - method, url, json=json, params=params, + method, + url, + json=json, + params=params, ) if response.status_code == requests.codes.not_found: print("Not found: {}".format(url)) @@ -211,7 +213,7 @@ def export_collection(collection_id, file_path): def get_db_names(data, source): - return ["{} ({} - {})".format(db['name'], db['id'], source) for db in data] + return ["{} ({} - {})".format(db["name"], db["id"], source) for db in data] def map_databases(exported_databases): @@ -236,7 +238,7 @@ def map_databases(exported_databases): try: if int(selection) in db_ids: - DB_MAPPING[exported_db['id']] = int(selection) + DB_MAPPING[exported_db["id"]] = int(selection) break else: print("\n*** Invalid selection ***\n") @@ -391,7 +393,7 @@ def main(): parser = get_argparser() args = parser.parse_args() - METABASE_CONFIG['url'] = args.url + METABASE_CONFIG["url"] = args.url password = getpass.getpass("Password for user {}: ".format(args.username)) login_response = login(args.username, password) diff --git a/settings.py b/settings.py deleted file mode 100644 index b315940..0000000 --- a/settings.py +++ /dev/null @@ -1,5 +0,0 @@ -import os - -POSTGRES_DATABASE_URL = os.getenv( - "POSTGRES_DATABASE_URL", "postgresql://postgres@postgres/kanban_metrics" -) diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..68e174b --- /dev/null +++ b/setup.cfg @@ -0,0 +1,19 @@ +[metadata] +name = kanbandash +version = 0.1.0 + +[options] +packages = find: + +install_requires = + requests + psycopg2 + SQLAlchemy + alembic + +include_package_data = True + +[options.entry_points] +console_scripts = + kanban-dash = kanbandash.cli:main + diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..b908cbe --- /dev/null +++ b/setup.py @@ -0,0 +1,3 @@ +import setuptools + +setuptools.setup() From 6e48f8e3cf19e28606881b95b5b488c9d88aa187 Mon Sep 17 00:00:00 2001 From: Sergio Oliveira Date: Sun, 8 Nov 2020 12:55:49 -0300 Subject: [PATCH 2/4] Using metabase-import-export from pypi --- README.md | 12 +- scripts/metabase-import-export.py | 412 ------------------------------ setup.cfg | 3 +- 3 files changed, 13 insertions(+), 414 deletions(-) delete mode 100755 scripts/metabase-import-export.py diff --git a/README.md b/README.md index e00ea01..41b761f 100644 --- a/README.md +++ b/README.md @@ -121,7 +121,17 @@ docker-compose run kanban-dash kanban-dash generate-test-data * Run the script to import the Kanban dashboard to the new collection. 
Make sure to use the proper username and collection-id: ``` -./scripts/metabase-import-export.py \ +metabase-import-export \ + --username= \ + import \ + --collection-id= \ + --import-file=kanbandash/kanban-dashboards.json +``` + +or using our docker container: + +``` +docker run kanban-dash metabase-import-export \ --username= \ import \ --collection-id= \ diff --git a/scripts/metabase-import-export.py b/scripts/metabase-import-export.py deleted file mode 100755 index 588d682..0000000 --- a/scripts/metabase-import-export.py +++ /dev/null @@ -1,412 +0,0 @@ -#!/usr/bin/env python - -import argparse -import getpass -import json -import requests -import sys -from copy import deepcopy - -METABASE_CONFIG = {} - -DATABASES_CACHE = [] -EXPORT_IMPORT_MAPPING = {} -DB_MAPPING = {} -TABLE_MAPPING = {} -FIELD_MAPPING = {} - -SESSION = requests.Session() - - -def call_api(method, uri, json=None, params=None): - url = METABASE_CONFIG["url"] + uri - response = SESSION.request( - method, - url, - json=json, - params=params, - ) - if response.status_code == requests.codes.not_found: - print("Not found: {}".format(url)) - elif not 200 <= response.status_code < 300: - print(response.content.decode("utf-8")) - return response.json() - - -def login(username, password): - login_url = "/api/session" - data = { - "username": username, - "password": password, - "remember": False, - } - return call_api("post", login_url, json=data) - - -def get_card(card_id): - api_card_url = "/api/card/{}".format(card_id) - return call_api("get", api_card_url) - - -def list_collections(): - api_list_collections_url = "/api/collection/" - return call_api("get", api_list_collections_url) - - -def get_collection_items(collection_id): - api_collection_items_url = "/api/collection/{}/items".format(collection_id) - return call_api("get", api_collection_items_url) - - -def get_dashboard(dashboard_id): - api_dashboard_url = "/api/dashboard/{}".format(dashboard_id) - return call_api("get", api_dashboard_url) - - -def list_databases(): - api_db_url = "/api/database/" - return call_api("get", api_db_url) - - -def get_database(db_id): - if DATABASES_CACHE: - return DATABASES_CACHE - - api_db_url = "/api/database/{}".format(db_id) - dbs = call_api("get", api_db_url, params={"include": "tables.fields"}) - DATABASES_CACHE.extend(dbs) - return dbs - - -def get_mapping_db(database_id): - mapped_id = DB_MAPPING.get(database_id) - if mapped_id is None: - print("Database '{}' was not mapped.".format(database_id)) - sys.exit(1) - return mapped_id - - -def create_card(card, **kwargs): - api_card_url = "/api/card" - data = deepcopy(card) - data.update(kwargs) - - data["database_id"] = get_mapping_db(data["database_id"]) - data["dataset_query"]["database"] = get_mapping_db( - data["dataset_query"]["database"], - ) - - if data["table_id"] is not None: - data["table_id"] = TABLE_MAPPING[data["table_id"]] - - if data["dataset_query"]["type"] == "native": - for tag in data["dataset_query"]["native"]["template-tags"].values(): - if "dimension" in tag and tag["dimension"][0] == "field-id": - tag["dimension"][1] = FIELD_MAPPING[tag["dimension"][1]] - - new_card = call_api("post", api_card_url, json=data) - EXPORT_IMPORT_MAPPING[card["id"]] = new_card["id"] - - return new_card - - -def replace_card_ids(obj_dict, id_key="card_id"): - new_obj_dict = deepcopy(obj_dict) - for item in new_obj_dict: - new_card_id = EXPORT_IMPORT_MAPPING[item[id_key]] - item[id_key] = new_card_id - return new_obj_dict - - -def create_dashboard(dashboard, **kwargs): - api_dashboard_url 
= "/api/dashboard" - data = deepcopy(dashboard) - data.update(kwargs) - - for field in data["param_fields"].values(): - field["table_id"] = TABLE_MAPPING[field["table_id"]] - - new_dashboard = call_api("post", api_dashboard_url, json=data) - - api_add_card_to_dashboard = "{}/{}/cards".format( - api_dashboard_url, - new_dashboard["id"], - ) - for card in dashboard["ordered_cards"]: - if card["card_id"] not in EXPORT_IMPORT_MAPPING: - print( - "Card '{}' was not imported and it's trying " - "to be added to an imported dashboard".format(card["card_id"]) - ) - sys.exit(1) - continue - - new_card_id = EXPORT_IMPORT_MAPPING[card["card_id"]] - parameter_mappings = replace_card_ids(card["parameter_mappings"]) - series = replace_card_ids(card["series"], "id") - - data = { - "cardId": new_card_id, - "parameter_mappings": parameter_mappings, - "series": series, - "visualization_settings": card["visualization_settings"], - "sizeX": card["sizeX"], - "sizeY": card["sizeY"], - "row": card["row"], - "col": card["col"], - } - call_api("post", api_add_card_to_dashboard, json=data) - - -def export_databases(collection_items): - database_ids = set() - for item in collection_items: - if item["model"] == "card": - database_ids.add(item["data"]["database_id"]) - database_ids.add(item["data"]["dataset_query"]["database"]) - - database_ids.discard(None) - - databases = [] - for db_id in database_ids: - databases.append(get_database(db_id)) - - return databases - - -def check_if_collection_exists(collection_id): - collections = list_collections() - for collection in collections: - if collection["id"] == collection_id: - collection_id = collection["id"] - break - else: - print("Collection with id '{}' not found.".format(collection_id)) - sys.exit(1) - - -def export_collection(collection_id, file_path): - check_if_collection_exists(collection_id) - - collection_items = get_collection_items(collection_id) - for item in collection_items: - model = item["model"] - id = item["id"] - - if model == "card": - data = get_card(id) - elif model == "dashboard": - data = get_dashboard(id) - item["data"] = data - - databases = export_databases(collection_items) - - export_data = { - "collection_items": collection_items, - "databases": databases, - } - - with open(file_path, "w") as export_file: - json.dump(export_data, export_file, indent=2) - - return collection_items - - -def get_db_names(data, source): - return ["{} ({} - {})".format(db["name"], db["id"], source) for db in data] - - -def map_databases(exported_databases): - dbs = list_databases() - - print( - "\nTo import the data to Metabase you will need to " - "select the database where you want the data to be imported to.\n" - ) - db_ids = [db["id"] for db in dbs] - for exported_db in exported_databases: - while True: - print( - "Select the database where you want to import the data exported " - "from the database '{}'.\n".format(exported_db["name"]) - ) - for db in dbs: - print("{} - {}".format(db["id"], db["name"])) - - print("") - selection = input("\n>>> ") - - try: - if int(selection) in db_ids: - DB_MAPPING[exported_db["id"]] = int(selection) - break - else: - print("\n*** Invalid selection ***\n") - except ValueError: - print("\n** Invalid selection **\n") - - -def load_database_mapping(exported_databases): - map_databases(exported_databases) - - exported_db_ids = {db["id"] for db in exported_databases} - db_ids = DB_MAPPING.values() - - diff_exported_to_mapped = exported_db_ids - set(DB_MAPPING.keys()) - if diff_exported_to_mapped: - print( - "All exported DBs 
needs to be mapped to be imported. " - "The DBs '{}' are not mapped.".format(diff_exported_to_mapped), - ) - sys.exit(1) - - databases = [] - tables = [] - fields = [] - - for db_id in db_ids: - database = get_database(db_id) - databases.append(database) - - tables.extend(database["tables"]) - for table in database["tables"]: - fields.extend(table["fields"]) - - for exported_db in exported_databases: - for exported_table in exported_db["tables"]: - for table in tables: - if ( - table["name"] == exported_table["name"] - and table["db_id"] == DB_MAPPING[exported_db["id"]] - ): - TABLE_MAPPING[exported_table["id"]] = table["id"] - break - - else: - print( - "Table '{}' doesn't exist on db '{}'.".format( - exported_table["name"], - DB_MAPPING[exported_db["id"]], - ) - ) - sys.exit(1) - - for exported_field in exported_table["fields"]: - for field in fields: - if ( - field["name"] == exported_field["name"] - and field["table_id"] == TABLE_MAPPING[exported_table["id"]] - ): - FIELD_MAPPING[exported_field["id"]] = field["id"] - break - - else: - print( - "Field '{}' doesn't exist on table '{}'.".format( - exported_table["name"], TABLE_MAPPING[exported_table["id"]] - ) - ) - sys.exit(1) - - -def import_collection(export_file, collection_id): - check_if_collection_exists(collection_id) - - with open(export_file) as export_file: - export_data = json.load(export_file) - - load_database_mapping(export_data["databases"]) - - for item in export_data["collection_items"]: - if item["model"] == "card": - create_card(item["data"], collection_id=collection_id) - - for item in export_data["collection_items"]: - if item["model"] == "dashboard": - create_dashboard(item["data"], collection_id=collection_id) - - -def run_import(args): - import_collection(args.import_file, args.collection_id) - - -def run_export(args): - export_collection(args.collection_id, file_path=args.export_file) - - -def get_argparser(): - - parser = argparse.ArgumentParser( - description="Export/Import metabase collection to JSON file.", - epilog=( - "Supports SQL native questions and dashboards. Snippets and non-SQL " - "questions are currently not supported." 
- ), - ) - parser.add_argument( - "--username", - help="Metabase admin user", - required=True, - ) - parser.add_argument( - "--url", - help="Metabase base URL", - default="http://localhost:3000", - ) - - subparsers = parser.add_subparsers() - - # Export sub-parser - export_parser = subparsers.add_parser("export") - export_parser.add_argument( - "--collection-id", - type=int, - help="The id of the collection to be exported.", - required=True, - ) - export_parser.add_argument( - "--export-file", - help="File path to store the export data in JSON format.", - required=True, - ) - export_parser.set_defaults(func=run_export) - - # Import sub-parser - import_parser = subparsers.add_parser("import") - import_parser.add_argument( - "--collection-id", - type=int, - help="The id of the collection where the data will be imported to.", - required=True, - ) - import_parser.add_argument( - "--import-file", - help="File path to import the data from.", - required=True, - ) - import_parser.set_defaults(func=run_import) - - return parser - - -def main(): - parser = get_argparser() - args = parser.parse_args() - - METABASE_CONFIG["url"] = args.url - - password = getpass.getpass("Password for user {}: ".format(args.username)) - login_response = login(args.username, password) - - if "errors" in login_response: - print("Failed to login in Metabase") - sys.exit(1) - - if hasattr(args, "func"): - args.func(args) - else: - parser.print_usage() - - -if __name__ == "__main__": - main() diff --git a/setup.cfg b/setup.cfg index 68e174b..7fd0c9f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = kanbandash -version = 0.1.0 +version = 0.2.0 [options] packages = find: @@ -10,6 +10,7 @@ install_requires = psycopg2 SQLAlchemy alembic + metabase-import-export include_package_data = True From 95bf2f7e09a1701b89e91c4a7268082f1349a2a5 Mon Sep 17 00:00:00 2001 From: Sergio Oliveira Date: Sun, 8 Nov 2020 16:46:43 -0300 Subject: [PATCH 3/4] Wrapper around metabase-import-export lib --- README.md | 15 +++++-- kanbandash/cli.py | 60 ++++++++++++++++++++++++++- kanbandash/models.py | 24 +++++++++-- kanbandash/populate_with_test_data.py | 25 ++++++++++- setup.cfg | 2 +- 5 files changed, 115 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 41b761f..a676920 100644 --- a/README.md +++ b/README.md @@ -59,12 +59,21 @@ After creating the collection you will have to access it in order to get the col Now we are ready to run the collection import script: ``` -./scripts/metabase-import-export.py \ +kanban-dash metabase --import \ --username= \ - import \ --collection-id= \ - --import-file=kanbandash/kanban-dashboards.json + --url= +``` + +or using our docker container: + ``` +docker run -it cravefood/kanban-dash kanban-dash metabase --import \ + --username= \ + --collection-id= \ + --url= +``` + After running the script you should be able to access the collection and see the imported reports. Select "Kanban" in the dashboards tab to see the dashboard without any data. diff --git a/kanbandash/cli.py b/kanbandash/cli.py index d908d82..e890916 100644 --- a/kanbandash/cli.py +++ b/kanbandash/cli.py @@ -6,6 +6,13 @@ from alembic import command from alembic.config import Config +from metabase_import_export import ( + export_collection, + import_collection, + metabase_login, + set_metabase_url, +) + from . 
import models from .populate_with_test_data import generate_data @@ -37,12 +44,14 @@ def get_tables(): def drop_tables(): tables = get_tables() - models.Base.metadata.drop_all(models.SQLALCHEMY_ENGINE, tables=tables) + engine = models.get_db_engine() + models.Base.metadata.drop_all(engine, tables=tables) def create_tables(): tables = get_tables() - models.Base.metadata.create_all(models.SQLALCHEMY_ENGINE, tables=tables) + engine = models.get_db_engine() + models.Base.metadata.create_all(engine, tables=tables) migrate_schema() @@ -66,12 +75,59 @@ def manage_models(args): migrate_schema() +def manage_metabase(args): + set_metabase_url(args.url) + metabase_login(args.username) + + if args.export: + print("Where do you want to store your Kanban definitions?") + kanban_definitions_file = input(">> ") + export_collection(kanban_definitions_file, args.collection_id) + else: + kanban_definitions_file = os.path.join(SCRIPT_DIR, "kanban-dashboards.json") + import_collection(kanban_definitions_file, args.collection_id) + + def get_argparser(): parser = argparse.ArgumentParser( description="Metabase Kanban Dashboard manager", ) subparsers = parser.add_subparsers() + # metabase subparsers + metabase = subparsers.add_parser("metabase") + import_export_group = metabase.add_mutually_exclusive_group(required=True) + import_export_group.add_argument( + "--import", + action="store_true", + help=("Import the Kanban dashboard definitions to a Metabase instance."), + ) + import_export_group.add_argument( + "--export", + action="store_true", + help=("Export the Kanban dashboard definitions from a Metabase instance."), + ) + metabase.add_argument( + "--collection-id", + type=int, + help=( + "The id of the collection where the data will be imported to or " + "exported from." + ), + required=True, + ) + metabase.add_argument( + "--url", + help="Metabase base URL", + default="http://localhost:3000", + ) + metabase.add_argument( + "--username", + help="Metabase admin user", + required=True, + ) + metabase.set_defaults(func=manage_metabase) + # models subparsers models = subparsers.add_parser("models") models_group = models.add_mutually_exclusive_group(required=True) diff --git a/kanbandash/models.py b/kanbandash/models.py index 97beb78..3ceb530 100644 --- a/kanbandash/models.py +++ b/kanbandash/models.py @@ -15,11 +15,29 @@ from . 
import settings -# SQLAlchemy -SQLALCHEMY_ENGINE = create_engine(settings.POSTGRES_DATABASE_URL, echo=False) +SQLALCHEMY_CACHE = {} Base = declarative_base() -Session = sessionmaker(bind=SQLALCHEMY_ENGINE) + + +def get_db_engine(): + if "engine" in SQLALCHEMY_CACHE: + return SQLALCHEMY_CACHE["engine"] + + engine = create_engine(settings.POSTGRES_DATABASE_URL, echo=False) + SQLALCHEMY_CACHE["engine"] = engine + return engine + + +def get_db_session_class(): + if "session" in SQLALCHEMY_CACHE: + return SQLALCHEMY_CACHE["session"] + + engine = get_db_engine() + Session = sessionmaker(bind=engine) + SQLALCHEMY_CACHE["session"] = Session + + return Session class KanbanClassOfService(Base): diff --git a/kanbandash/populate_with_test_data.py b/kanbandash/populate_with_test_data.py index a883195..6b3d296 100755 --- a/kanbandash/populate_with_test_data.py +++ b/kanbandash/populate_with_test_data.py @@ -16,10 +16,10 @@ KanbanClassOfService, KanbanColumn, KanbanDay, - Session, + get_db_session_class, ) -factories_session = Session() +FACTORY_SESSION_CACHE = {} START_DATE = datetime.date(2019, 1, 1) TODAY = datetime.datetime.now().date() @@ -34,7 +34,20 @@ } +def get_factory_session(): + if "session" in FACTORY_SESSION_CACHE: + return FACTORY_SESSION_CACHE["session"] + + Session = get_db_session_class() + factories_session = Session() + FACTORY_SESSION_CACHE["session"] = factories_session + + return factories_session + + def generate_boards(n): + factories_session = get_factory_session() + boards = [] for letter in string.ascii_uppercase: name = "Board {}".format(letter) @@ -68,6 +81,8 @@ def generate_kanban_columns(board): def generate_kanban_days(): + factories_session = get_factory_session() + class_of_services = get_object_ids(KanbanClassOfService) columns_query = factories_session.query(KanbanColumn) date = START_DATE @@ -109,6 +124,8 @@ def generate_kanban_days(): def generate_class_of_services(): + factories_session = get_factory_session() + class_of_services = CLASS_OF_SERVICES.keys() for name in class_of_services: @@ -119,11 +136,15 @@ def generate_class_of_services(): def get_object_ids(Model): + factories_session = get_factory_session() + raw_query_data = factories_session.query(Model.id).all() return list(chain(*raw_query_data)) def generate_kanban_cards(n, m): + factories_session = get_factory_session() + date = START_DATE boards = get_object_ids(KanbanBoard) class_of_services = get_object_ids(KanbanClassOfService) diff --git a/setup.cfg b/setup.cfg index 7fd0c9f..39e4471 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = kanbandash -version = 0.2.0 +version = 0.2.2 [options] packages = find: From a91df94582b48be0968055f925bf291c9b73fe3b Mon Sep 17 00:00:00 2001 From: Thiago Ferreira Date: Wed, 30 Dec 2020 14:56:06 -0300 Subject: [PATCH 4/4] Updated readme with the correct docker image name --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a676920..12db9cb 100644 --- a/README.md +++ b/README.md @@ -140,7 +140,7 @@ metabase-import-export \ or using our docker container: ``` -docker run kanban-dash metabase-import-export \ +docker run cravefood/kanban-dash metabase-import-export \ --username= \ import \ --collection-id= \