Commit
chore: fix lint and remove incorrect integration mark from unit tests (#4621)

* chore: fix lint and remove incorrect integration mark from unit tests

* add to test requirements

* revert athena source tests
anshbansal authored Apr 8, 2022
1 parent cd43a4a commit 61a95f4
Showing 9 changed files with 13 additions and 39 deletions.
4 changes: 3 additions & 1 deletion metadata-ingestion/setup.py
@@ -291,7 +291,9 @@ def get_long_description():
"bigquery-usage",
"clickhouse",
"clickhouse-usage",
"druid",
"elasticsearch",
"ldap",
"looker",
"glue",
"mariadb",
@@ -310,7 +312,7 @@ def get_long_description():
"trino",
"hive",
"starburst-trino-usage",
"powerbi"
"powerbi",
# airflow is added below
]
for dependency in plugins[plugin]
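
For context, the list above feeds the comprehension visible in the trailing context lines ("for dependency in plugins[plugin]"), which flattens each named plugin into its concrete pip requirements for the test environment. A minimal sketch of that pattern, using made-up pins rather than the real setup.py contents:

```python
# Sketch only: plugin names map to their pip requirements, and the test extra is
# built by flattening the entries that the unit tests now import directly.
plugins = {
    "druid": {"pydruid>=0.6.2"},   # illustrative pins, not DataHub's actual ones
    "ldap": {"python-ldap>=2.4"},
    "powerbi": {"msal"},
}

test_plugin_names = [
    "druid",
    "ldap",
    "powerbi",  # trailing comma keeps future additions to one-line diffs
]

dev_requirements = {
    dependency
    for plugin_name in test_plugin_names
    for dependency in plugins[plugin_name]
}

print(sorted(dev_requirements))
```
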
4 changes: 2 additions & 2 deletions metadata-ingestion/src/datahub/ingestion/source/tableau.py
@@ -109,7 +109,7 @@ class TableauSource(Source):
config: TableauConfig
report: SourceReport
platform = "tableau"
server: Server
server: Optional[Server]
upstream_tables: Dict[str, Tuple[Any, str]] = {}

def __hash__(self):
@@ -136,7 +136,7 @@ def close(self) -> None:

def _authenticate(self):
# https://tableau.github.io/server-client-python/docs/api-ref#authentication
authentication = None
authentication: Optional[Union[TableauAuth, PersonalAccessTokenAuth]] = None
if self.config.username and self.config.password:
authentication = TableauAuth(
username=self.config.username,
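
The two annotation changes above are the lint fix for this file: `server` can legitimately be None (it is only populated after authentication), and `authentication` starts as None before being reassigned to one of two auth classes, so both need `Optional[...]` types for a strict mypy run to accept the reassignments. A simplified sketch of the pattern, with stand-in classes rather than the actual tableauserverclient API:

```python
from typing import Optional, Union


class TableauAuth:  # stand-ins for the tableauserverclient classes
    pass


class PersonalAccessTokenAuth:
    pass


class Server:
    pass


class TableauSourceSketch:
    # Declared Optional because the attribute holds None until authentication runs.
    server: Optional[Server] = None

    def _authenticate(self, username: Optional[str], token: Optional[str]) -> None:
        # Without the explicit annotation, mypy infers type "None" from the first
        # assignment and rejects the later reassignment as incompatible.
        authentication: Optional[Union[TableauAuth, PersonalAccessTokenAuth]] = None
        if username:
            authentication = TableauAuth()
        elif token:
            authentication = PersonalAccessTokenAuth()
        if authentication is not None:
            self.server = Server()
```
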
2 changes: 1 addition & 1 deletion metadata-ingestion/tests/unit/test_athena_source.py
@@ -5,6 +5,7 @@
import pytest
from freezegun import freeze_time

from datahub.ingestion.api.common import PipelineContext
from src.datahub.ingestion.source.aws.s3_util import make_s3_urn

FROZEN_TIME = "2020-04-14 07:00:00"
@@ -34,7 +35,6 @@ def test_athena_get_table_properties():
def test_athena_get_table_properties():
from pyathena.model import AthenaTableMetadata

from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource

config = AthenaConfig.parse_obj(
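
Per the "revert athena source tests" note in the commit message, only the `PipelineContext` import moves to module level here; the pyathena and Athena source imports stay inside the test, presumably so the module remains importable when the athena extra is not installed. A sketch of that split, with the test body elided:

```python
# Module-level: imports that need no optional plugin extras.
from datahub.ingestion.api.common import PipelineContext  # noqa: F401


def test_athena_get_table_properties_sketch():
    # Function-level: imports that require the `athena` extra, deferred so this
    # module stays importable in environments without pyathena.
    from pyathena.model import AthenaTableMetadata  # noqa: F401
    from datahub.ingestion.source.sql.athena import AthenaConfig, AthenaSource  # noqa: F401

    # ... build an AthenaConfig / AthenaSource with a PipelineContext and assert
    # on the reported table properties, as the real test does ...
    assert PipelineContext is not None
```
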
4 changes: 1 addition & 3 deletions metadata-ingestion/tests/unit/test_bigquery_usage_source.py
@@ -1,12 +1,10 @@
import json
import os

import pytest
from datahub.ingestion.source.usage.bigquery_usage import BigQueryUsageConfig


@pytest.mark.integration
def test_bigquery_uri_with_credential():
from datahub.ingestion.source.usage.bigquery_usage import BigQueryUsageConfig

expected_credential_json = {
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
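
The remaining test modules repeat the same two-part change, which is easier to read as a before/after sketch because the rendered diff above carries no +/- markers. Everything below is a stand-in, not a DataHub class:

```python
# Stand-in for a plugin config class such as ClickHouseConfig or DruidConfig.
class FakeSourceConfig:
    def __init__(self, host_port: str) -> None:
        self.host_port = host_port

    def get_sql_alchemy_url(self) -> str:
        return f"fake://{self.host_port}"


# Before (what the commit removes): an integration mark on a test that touches
# nothing external, plus an import buried in the test body.
#
#     @pytest.mark.integration
#     def test_fake_uri():
#         from datahub.ingestion.source.sql.fake import FakeSourceConfig
#         ...
#
# After (what the commit keeps): a plain unit test against a module-level import.
def test_fake_uri() -> None:
    config = FakeSourceConfig(host_port="localhost:1234")
    assert config.get_sql_alchemy_url() == "fake://localhost:1234"
```
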
8 changes: 1 addition & 7 deletions metadata-ingestion/tests/unit/test_clickhouse_source.py
@@ -1,9 +1,7 @@
import pytest
from datahub.ingestion.source.sql.clickhouse import ClickHouseConfig


@pytest.mark.integration
def test_clickhouse_uri_https():
from datahub.ingestion.source.sql.clickhouse import ClickHouseConfig

config = ClickHouseConfig.parse_obj(
{
@@ -20,9 +18,7 @@ def test_clickhouse_uri_https():
)


@pytest.mark.integration
def test_clickhouse_uri_native():
from datahub.ingestion.source.sql.clickhouse import ClickHouseConfig

config = ClickHouseConfig.parse_obj(
{
@@ -35,9 +31,7 @@ def test_clickhouse_uri_native():
assert config.get_sql_alchemy_url() == "clickhouse+native://user:password@host:1111"


@pytest.mark.integration
def test_clickhouse_uri_native_secure():
from datahub.ingestion.source.sql.clickhouse import ClickHouseConfig

config = ClickHouseConfig.parse_obj(
{
4 changes: 1 addition & 3 deletions metadata-ingestion/tests/unit/test_druid_source.py
@@ -1,9 +1,7 @@
import pytest
from datahub.ingestion.source.sql.druid import DruidConfig


@pytest.mark.integration
def test_druid_uri():
from datahub.ingestion.source.sql.druid import DruidConfig

config = DruidConfig.parse_obj({"host_port": "localhost:8082"})

7 changes: 2 additions & 5 deletions metadata-ingestion/tests/unit/test_hive_source.py
@@ -1,13 +1,11 @@
import deepdiff
import pytest

from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.source.sql.hive import HiveConfig, HiveSource
from datahub.utilities.hive_schema_to_avro import get_avro_schema_for_hive_column


@pytest.mark.integration
def test_hive_configuration_get_identifier_with_database():
from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.source.sql.hive import HiveConfig, HiveSource

test_db_name = "test_database"
# test_table_name = "test_table"
@@ -26,7 +24,6 @@ def test_hive_configuration_get_identifier_with_database():
assert output == [expected_output]


@pytest.mark.integration
def test_hive_configuration_get_avro_schema_from_native_data_type():
# Test 3 - struct of struct
datatype_string = "struct<type:string,provider:array<int>,abc:struct<t1:string>>"
6 changes: 2 additions & 4 deletions metadata-ingestion/tests/unit/test_ldap_source.py
@@ -1,17 +1,16 @@
import pytest

from datahub.ingestion.source.ldap import parse_from_attrs, strip_ldap_info


@pytest.mark.integration
def test_strip_ldap_info():
from datahub.ingestion.source.ldap import strip_ldap_info

assert (
strip_ldap_info(b"uid=firstname.surname,ou=People,dc=internal,dc=machines")
== "firstname.surname"
)


@pytest.mark.integration
@pytest.mark.parametrize(
"input, expected",
[
@@ -35,7 +34,6 @@ def test_strip_ldap_info():
],
)
def test_parse_from_attrs(input, expected):
from datahub.ingestion.source.ldap import parse_from_attrs

assert (
parse_from_attrs(
13 changes: 0 additions & 13 deletions metadata-ingestion/tests/unit/test_utilities.py
@@ -40,7 +40,6 @@ def maker(n):
]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -52,7 +51,6 @@ def test_metadatasql_sql_parser_get_tables_from_simple_query():
assert tables_list == ["bar", "foo"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -64,7 +62,6 @@ def test_sqllineage_sql_parser_get_tables_from_simple_query():
assert tables_list == ["bar", "foo"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -121,7 +118,6 @@ def test_sqllineage_sql_parser_get_tables_from_complex_query():
assert tables_list == ["schema1.foo", "schema2.bar"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -141,7 +137,6 @@ def test_sqllineage_sql_parser_get_columns_from_simple_query():
assert columns_list == ["a", "b"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -153,7 +148,6 @@ def test_metadatasql_sql_parser_get_columns_with_alias_and_count_star():
assert columns_list == ["a", "b", "count", "test"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -181,7 +175,6 @@ def test_metadatasql_sql_parser_get_columns_with_more_complex_join():
assert columns_list == ["bs", "pi", "pt", "pu", "v"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -238,7 +231,6 @@ def test_sqllineage_sql_parser_get_columns_complex_query_with_union():
assert columns_list == ["c", "date", "e", "u", "x"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -257,7 +249,6 @@ def test_metadatasql_sql_parser_get_tables_from_templated_query():
assert tables_list == ["my_view.SQL_TABLE_NAME"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -276,7 +267,6 @@ def test_sqllineage_sql_parser_get_tables_from_templated_query():
assert tables_list == ["my_view.SQL_TABLE_NAME"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -295,7 +285,6 @@ def test_metadatasql_sql_parser_get_columns_from_templated_query():
assert columns_list == ["city", "country", "measurement", "timestamp"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -314,7 +303,6 @@ def test_sqllineage_sql_parser_get_columns_from_templated_query():
assert columns_list == ["city", "country", "measurement", "timestamp"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
@@ -329,7 +317,6 @@ def test_sqllineage_sql_parser_with_weird_lookml_query():
assert columns_list == ["aliased_platform", "country", "date"]


@pytest.mark.integration
@pytest.mark.skipif(
sys.version_info < (3, 7), reason="The LookML source requires Python 3.7+"
)
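
The marker removals matter because unit and integration suites are normally selected by marker expression, so a unit test carrying `@pytest.mark.integration` silently drops out of the unit run and pulls its plugin dependencies into the integration job instead. A generic sketch of that mechanism, not necessarily DataHub's exact CI configuration:

```python
# conftest.py (generic sketch)
def pytest_configure(config):
    # Register the marker so runs with --strict-markers accept it.
    config.addinivalue_line(
        "markers",
        "integration: tests that need external services or optional plugin extras",
    )


# Typical selection, shown as comments:
#   pytest -m "not integration"   # unit job: wrongly marked tests were skipped here
#   pytest -m "integration"       # integration job: they ran in the wrong bucket instead
```
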
