Commit

chore: switching out ConnectorRegistry references for DatasourceDAO (apache#20380)

* rename and move dao file

* Update dao.py

* add cachekey

* Update __init__.py

* change reference in query context test

* add utils ref

* more ref changes

* add helpers

* add todo in dashboard.py

* add cachekey

* circular import error in dar.py

* push rest of refs

* fix linting

* fix more linting

* update enum

* remove references for connector registry

* big refactor

* take value

* fix

* test to see if removing value works

* delete ConnectorRegistry

* address concerns

* address comments

* fix merge conflicts

* address concern II

* address concern II

* fix test

Co-authored-by: Phillip Kelley-Dotson <pkelleydotson@yahoo.com>
2 people authored and akshatsri committed Jul 19, 2022
1 parent a1e233d commit 60c7811
Showing 34 changed files with 333 additions and 504 deletions.
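
Most of the 34 files follow the same call-site migration; the condensed before/after below is a sketch assembled from the hunks that follow (datasource_type and datasource_id stand in for whatever values each call site already has in scope):

    # Before: ConnectorRegistry resolved a datasource from a type string and an id.
    datasource = ConnectorRegistry.get_datasource(
        datasource_type, datasource_id, db.session
    )

    # After: DatasourceDAO takes the session first and a DatasourceType enum member.
    from superset.datasource.dao import DatasourceDAO
    from superset.utils.core import DatasourceType

    datasource = DatasourceDAO.get_datasource(
        db.session, DatasourceType(datasource_type), datasource_id
    )
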
1 change: 0 additions & 1 deletion superset/__init__.py
@@ -19,7 +19,6 @@
 from werkzeug.local import LocalProxy

 from superset.app import create_app
-from superset.connectors.connector_registry import ConnectorRegistry
 from superset.extensions import (
     appbuilder,
     cache_manager,
6 changes: 3 additions & 3 deletions superset/cachekeys/api.py
@@ -25,7 +25,7 @@
 from sqlalchemy.exc import SQLAlchemyError

 from superset.cachekeys.schemas import CacheInvalidationRequestSchema
-from superset.connectors.connector_registry import ConnectorRegistry
+from superset.connectors.sqla.models import SqlaTable
 from superset.extensions import cache_manager, db, event_logger
 from superset.models.cache import CacheKey
 from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
@@ -83,13 +83,13 @@ def invalidate(self) -> Response:
             return self.response_400(message=str(error))
         datasource_uids = set(datasources.get("datasource_uids", []))
         for ds in datasources.get("datasources", []):
-            ds_obj = ConnectorRegistry.get_datasource_by_name(
+            ds_obj = SqlaTable.get_datasource_by_name(
                 session=db.session,
                 datasource_type=ds.get("datasource_type"),
                 datasource_name=ds.get("datasource_name"),
                 schema=ds.get("schema"),
                 database_name=ds.get("database_name"),
             )

             if ds_obj:
                 datasource_uids.add(ds_obj.uid)
11 changes: 6 additions & 5 deletions superset/commands/utils.py
@@ -25,9 +25,10 @@
     OwnersNotFoundValidationError,
     RolesNotFoundValidationError,
 )
-from superset.connectors.connector_registry import ConnectorRegistry
-from superset.datasets.commands.exceptions import DatasetNotFoundError
+from superset.dao.exceptions import DatasourceNotFound
+from superset.datasource.dao import DatasourceDAO
 from superset.extensions import db, security_manager
+from superset.utils.core import DatasourceType

 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseDatasource
@@ -79,8 +80,8 @@ def populate_roles(role_ids: Optional[List[int]] = None) -> List[Role]:

 def get_datasource_by_id(datasource_id: int, datasource_type: str) -> BaseDatasource:
     try:
-        return ConnectorRegistry.get_datasource(
-            datasource_type, datasource_id, db.session
+        return DatasourceDAO.get_datasource(
+            db.session, DatasourceType(datasource_type), datasource_id
         )
-    except DatasetNotFoundError as ex:
+    except DatasourceNotFound as ex:
         raise DatasourceNotFoundValidationError() from ex
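
As a usage sketch (not part of the commit), the DAO lookup that get_datasource_by_id now wraps can be called directly; the id below is hypothetical and a Flask app context is assumed:

    from superset.dao.exceptions import DatasourceNotFound
    from superset.datasource.dao import DatasourceDAO
    from superset.extensions import db
    from superset.utils.core import DatasourceType

    try:
        # Same lookup the helper performs: coerce the type string to the enum,
        # then resolve the datasource by id through the DAO.
        datasource = DatasourceDAO.get_datasource(
            session=db.session,
            datasource_type=DatasourceType("table"),
            datasource_id=42,  # hypothetical id
        )
    except DatasourceNotFound:
        datasource = None  # the helper above re-raises this as a validation error
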
10 changes: 5 additions & 5 deletions superset/common/query_context_factory.py
@@ -22,8 +22,8 @@
 from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
 from superset.common.query_context import QueryContext
 from superset.common.query_object_factory import QueryObjectFactory
-from superset.connectors.connector_registry import ConnectorRegistry
-from superset.utils.core import DatasourceDict
+from superset.datasource.dao import DatasourceDAO
+from superset.utils.core import DatasourceDict, DatasourceType

 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseDatasource
@@ -32,7 +32,7 @@


 def create_query_object_factory() -> QueryObjectFactory:
-    return QueryObjectFactory(config, ConnectorRegistry(), db.session)
+    return QueryObjectFactory(config, DatasourceDAO(), db.session)


 class QueryContextFactory:  # pylint: disable=too-few-public-methods
@@ -82,6 +82,6 @@ def create(

     # pylint: disable=no-self-use
     def _convert_to_model(self, datasource: DatasourceDict) -> BaseDatasource:
-        return ConnectorRegistry.get_datasource(
-            str(datasource["type"]), int(datasource["id"]), db.session
+        return DatasourceDAO.get_datasource(
+            db.session, DatasourceType(datasource["type"]), int(datasource["id"])
         )
16 changes: 9 additions & 7 deletions superset/common/query_object_factory.py
@@ -21,29 +21,29 @@

 from superset.common.chart_data import ChartDataResultType
 from superset.common.query_object import QueryObject
-from superset.utils.core import apply_max_row_limit, DatasourceDict
+from superset.utils.core import apply_max_row_limit, DatasourceDict, DatasourceType
 from superset.utils.date_parser import get_since_until

 if TYPE_CHECKING:
     from sqlalchemy.orm import sessionmaker

-    from superset import ConnectorRegistry
     from superset.connectors.base.models import BaseDatasource
+    from superset.datasource.dao import DatasourceDAO


 class QueryObjectFactory:  # pylint: disable=too-few-public-methods
     _config: Dict[str, Any]
-    _connector_registry: ConnectorRegistry
+    _datasource_dao: DatasourceDAO
     _session_maker: sessionmaker

     def __init__(
         self,
         app_configurations: Dict[str, Any],
-        connector_registry: ConnectorRegistry,
+        _datasource_dao: DatasourceDAO,
         session_maker: sessionmaker,
     ):
         self._config = app_configurations
-        self._connector_registry = connector_registry
+        self._datasource_dao = _datasource_dao
         self._session_maker = session_maker

     def create(  # pylint: disable=too-many-arguments
@@ -75,8 +75,10 @@ def create(  # pylint: disable=too-many-arguments
         )

     def _convert_to_model(self, datasource: DatasourceDict) -> BaseDatasource:
-        return self._connector_registry.get_datasource(
-            str(datasource["type"]), int(datasource["id"]), self._session_maker()
+        return self._datasource_dao.get_datasource(
+            datasource_type=DatasourceType(datasource["type"]),
+            datasource_id=int(datasource["id"]),
+            session=self._session_maker(),
         )

     def _process_extras(  # pylint: disable=no-self-use
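
A construction sketch mirroring create_query_object_factory from the previous file: the factory now receives a DatasourceDAO rather than a ConnectorRegistry. current_app.config is used here only as a stand-in for however the caller obtains the Superset config, and an app context is assumed:

    from flask import current_app

    from superset.common.query_object_factory import QueryObjectFactory
    from superset.datasource.dao import DatasourceDAO
    from superset.extensions import db

    # Config, the DAO, and a session factory, as in create_query_object_factory().
    factory = QueryObjectFactory(current_app.config, DatasourceDAO(), db.session)

factory.create() then resolves {"type": ..., "id": ...} dicts through the DAO via _convert_to_model, as shown in the hunk above.
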
164 changes: 0 additions & 164 deletions superset/connectors/connector_registry.py

This file was deleted.

43 changes: 43 additions & 0 deletions superset/connectors/sqla/models.py
@@ -31,6 +31,7 @@
     List,
     NamedTuple,
     Optional,
+    Set,
     Tuple,
     Type,
     Union,
@@ -2021,6 +2022,48 @@ def query_datasources_by_name(
             query = query.filter_by(schema=schema)
         return query.all()

+    @classmethod
+    def query_datasources_by_permissions(  # pylint: disable=invalid-name
+        cls,
+        session: Session,
+        database: Database,
+        permissions: Set[str],
+        schema_perms: Set[str],
+    ) -> List["SqlaTable"]:
+        # TODO(hughhhh): add unit test
+        return (
+            session.query(cls)
+            .filter_by(database_id=database.id)
+            .filter(
+                or_(
+                    SqlaTable.perm.in_(permissions),
+                    SqlaTable.schema_perm.in_(schema_perms),
+                )
+            )
+            .all()
+        )
+
+    @classmethod
+    def get_eager_sqlatable_datasource(
+        cls, session: Session, datasource_id: int
+    ) -> "SqlaTable":
+        """Returns SqlaTable with columns and metrics."""
+        return (
+            session.query(cls)
+            .options(
+                sa.orm.subqueryload(cls.columns),
+                sa.orm.subqueryload(cls.metrics),
+            )
+            .filter_by(id=datasource_id)
+            .one()
+        )
+
+    @classmethod
+    def get_all_datasources(cls, session: Session) -> List["SqlaTable"]:
+        qry = session.query(cls)
+        qry = cls.default_query(qry)
+        return qry.all()
+
     @staticmethod
     def default_query(qry: Query) -> Query:
         return qry.filter_by(is_sqllab_view=False)
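
A usage sketch for the three classmethods added above (not part of the commit); the database lookup, permission strings, and id are hypothetical, and a Flask app context is assumed:

    from superset.connectors.sqla.models import SqlaTable
    from superset.extensions import db
    from superset.models.core import Database

    # Every SqlaTable datasource, with SQL Lab views filtered out by default_query().
    all_tables = SqlaTable.get_all_datasources(session=db.session)

    # One table with its columns and metrics eagerly loaded (raises if the id is unknown).
    table = SqlaTable.get_eager_sqlatable_datasource(session=db.session, datasource_id=1)

    # Tables in a database matching either datasource-level or schema-level permissions.
    database = db.session.query(Database).filter_by(database_name="examples").one()
    accessible = SqlaTable.query_datasources_by_permissions(
        session=db.session,
        database=database,
        permissions={"[examples].[my_table](id:1)"},  # hypothetical permission strings
        schema_perms={"[examples].[public]"},
    )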