Move version info around, clean up some bumpversion stuff #72

Merged: 3 commits (Apr 2, 2020)
25 changes: 25 additions & 0 deletions .bumpversion-dbt.cfg
@@ -0,0 +1,25 @@
[bumpversion]
current_version = 0.16.0
parse = (?P<major>\d+)
    \.(?P<minor>\d+)
    \.(?P<patch>\d+)
    ((?P<prerelease>[a-z]+)(?P<num>\d+))?
serialize =
    {major}.{minor}.{patch}{prerelease}{num}
    {major}.{minor}.{patch}
commit = False
tag = False

[bumpversion:part:prerelease]
first_value = a
values =
    a
    b
    rc

[bumpversion:part:num]
first_value = 1

[bumpversion:file:setup.py]

[bumpversion:file:requirements.txt]
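For context, a quick sanity check (mine, not part of the PR) of what that multi-line parse value captures, assuming bumpversion joins the continuation lines into a single regex:

import re

# The parse pattern from .bumpversion-dbt.cfg, with the continuation
# lines joined into one regex (assumed bumpversion behavior).
PARSE = re.compile(
    r'(?P<major>\d+)'
    r'\.(?P<minor>\d+)'
    r'\.(?P<patch>\d+)'
    r'((?P<prerelease>[a-z]+)(?P<num>\d+))?'
)

for version in ('0.16.0', '0.16.0a1', '0.16.0rc2'):
    print(version, '->', PARSE.match(version).groupdict())
# 0.16.0    -> {'major': '0', 'minor': '16', 'patch': '0', 'prerelease': None, 'num': None}
# 0.16.0a1  -> {'major': '0', 'minor': '16', 'patch': '0', 'prerelease': 'a', 'num': '1'}
# 0.16.0rc2 -> {'major': '0', 'minor': '16', 'patch': '0', 'prerelease': 'rc', 'num': '2'}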
11 changes: 8 additions & 3 deletions .bumpversion.cfg
@@ -3,22 +3,27 @@ current_version = 0.16.0a1
parse = (?P<major>\d+)
    \.(?P<minor>\d+)
    \.(?P<patch>\d+)
    (\.(?P<pluginpatch>\d+))?
    ((?P<prerelease>[a-z]+)(?P<num>\d+))?
serialize =
    {major}.{minor}.{patch}.{pluginpatch}{prerelease}{num}
    {major}.{minor}.{patch}{prerelease}{num}
    {major}.{minor}.{patch}.{pluginpatch}
    {major}.{minor}.{patch}
commit = False
tag = False

[bumpversion:part:prerelease]
first_value = a
values =
    a
    b
    rc

[bumpversion:part:num]
first_value = 1

[bumpversion:file:setup.py]
[bumpversion:part:pluginpatch]
first_value = 1

[bumpversion:file:dbt/adapters/spark/__version__.py]
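One illustration (mine, not from the PR) of why serialize lists the pluginpatch formats first: the first format whose parts are all present wins, so 0.16.0.1 keeps its fourth component while 0.16.0a1 falls through to the shorter form. A simplified sketch of that selection rule, assuming bumpversion tries formats in order:

# Simplified sketch of bumpversion's serialize selection: try each format
# in order and keep the first whose placeholders all have values.
FORMATS = [
    '{major}.{minor}.{patch}.{pluginpatch}{prerelease}{num}',
    '{major}.{minor}.{patch}{prerelease}{num}',
    '{major}.{minor}.{patch}.{pluginpatch}',
    '{major}.{minor}.{patch}',
]

def serialize(parts):
    present = {k: v for k, v in parts.items() if v is not None}
    for fmt in FORMATS:
        try:
            return fmt.format(**present)
        except KeyError:
            continue  # this format needs a part we don't have
    raise ValueError(f'no serialize format fits {parts}')

print(serialize({'major': 0, 'minor': 16, 'patch': 0,
                 'prerelease': 'a', 'num': 1}))   # 0.16.0a1
print(serialize({'major': 0, 'minor': 16, 'patch': 0,
                 'pluginpatch': 1}))              # 0.16.0.1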
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -1,4 +1,4 @@
version: 2
version: 2.1

jobs:
  unit:
1 change: 1 addition & 0 deletions dbt/adapters/spark/__version__.py
@@ -0,0 +1 @@
version = "0.16.0a1"
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,3 +1,3 @@
dbt-core==0.15.3
dbt-core==0.16.0
PyHive[hive]>=0.6.0,<0.7.0
thrift>=0.11.0,<0.12.0
24 changes: 19 additions & 5 deletions setup.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
from setuptools import find_packages, setup
import os
import re


this_directory = os.path.abspath(os.path.dirname(__file__))
@@ -9,18 +10,31 @@


package_name = "dbt-spark"
package_version = "0.16.0a1"


# get this from a separate file
def _dbt_spark_version():
    _version_path = os.path.join(
        this_directory, 'dbt', 'adapters', 'spark', '__version__.py'
    )
    _version_pattern = r'''version\s*=\s*["'](.+)["']'''
    with open(_version_path) as f:
        match = re.search(_version_pattern, f.read().strip())
    if match is None:
        raise ValueError(f'invalid version at {_version_path}')
    return match.group(1)


package_version = _dbt_spark_version()
description = """The SparkSQL plugin for dbt (data build tool)"""

# evade bumpversion with this fun trick
DBT_VERSION = (0, 16, 0)
dbt_version = '.'.join(map(str, DBT_VERSION))
dbt_version = '0.16.0'
# the package version should be the dbt version, with maybe some things on the
# ends of it. (0.16.0 vs 0.16.0a1, 0.16.0.1, ...)
if not package_version.startswith(dbt_version):
    raise ValueError(
        f'Invalid setup.py: package_version={package_version} must start with '
        f'dbt_version={dbt_version} (from {DBT_VERSION})'
        f'dbt_version={dbt_version}'
    )


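A quick illustration (not in the PR) of the prefix check above: any plugin version must extend the pinned dbt version, nothing else.

# Illustration of the startswith() guard in setup.py.
dbt_version = '0.16.0'
for pkg in ('0.16.0', '0.16.0a1', '0.16.0.1', '0.17.0'):
    print(pkg, pkg.startswith(dbt_version))
# 0.16.0 True / 0.16.0a1 True / 0.16.0.1 True / 0.17.0 False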
4 changes: 2 additions & 2 deletions test/unit/test_adapter.py
@@ -98,7 +98,7 @@ def hive_thrift_connect(host, port, username):

def test_parse_relation(self):
    self.maxDiff = None
    rel_type = SparkRelation.RelationType.Table
    rel_type = SparkRelation.get_relation_type.Table

    relation = SparkRelation.create(
        database='default_database',
@@ -179,7 +179,7 @@ def test_parse_relation(self):

def test_parse_relation_with_statistics(self):
    self.maxDiff = None
    rel_type = SparkRelation.RelationType.Table
    rel_type = SparkRelation.get_relation_type.Table

    relation = SparkRelation.create(
        database='default_database',
93 changes: 84 additions & 9 deletions test/unit/utils.py
@@ -1,4 +1,5 @@
"""Unit test utility functions.
Note that all imports should be inside the functions to avoid import/mocking
issues.
"""
@@ -11,6 +12,7 @@

def normalize(path):
    """On windows, neither is enough on its own:
    >>> normcase('C:\\documents/ALL CAPS/subdir\\..')
    'c:\\documents\\all caps\\subdir\\..'
    >>> normpath('C:\\documents/ALL CAPS/subdir\\..')
@@ -23,6 +25,7 @@ def normalize(path):

class Obj:
    which = 'blah'
    single_threaded = False


def mock_connection(name):
@@ -31,20 +34,63 @@ def mock_connection(name):
    return conn


def config_from_parts_or_dicts(project, profile, packages=None, cli_vars='{}'):
    from dbt.config import Project, Profile, RuntimeConfig
def profile_from_dict(profile, profile_name, cli_vars='{}'):
    from dbt.config import Profile, ConfigRenderer
    from dbt.context.base import generate_base_context
    from dbt.utils import parse_cli_vars
    from copy import deepcopy
    if not isinstance(cli_vars, dict):
        cli_vars = parse_cli_vars(cli_vars)
    if not isinstance(project, Project):
        project = Project.from_project_config(deepcopy(project), packages)

    renderer = ConfigRenderer(generate_base_context(cli_vars))
    return Profile.from_raw_profile_info(
        profile,
        profile_name,
        renderer,
    )


def project_from_dict(project, profile, packages=None, cli_vars='{}'):
    from dbt.context.target import generate_target_context
    from dbt.config import Project, ConfigRenderer
    from dbt.utils import parse_cli_vars
    if not isinstance(cli_vars, dict):
        cli_vars = parse_cli_vars(cli_vars)

    renderer = ConfigRenderer(generate_target_context(profile, cli_vars))

    project_root = project.pop('project-root', os.getcwd())

    return Project.render_from_dict(
        project_root, project, packages, renderer
    )


def config_from_parts_or_dicts(project, profile, packages=None, cli_vars='{}'):
    from dbt.config import Project, Profile, RuntimeConfig
    from copy import deepcopy

    if isinstance(project, Project):
        profile_name = project.profile_name
    else:
        profile_name = project.get('profile')

    if not isinstance(profile, Profile):
        profile = Profile.from_raw_profile_info(deepcopy(profile),
                                                project.profile_name,
                                                cli_vars)
        profile = profile_from_dict(
            deepcopy(profile),
            profile_name,
            cli_vars,
        )

    if not isinstance(project, Project):
        project = project_from_dict(
            deepcopy(project),
            profile,
            packages,
            cli_vars,
        )

    args = Obj()
    args.vars = repr(cli_vars)
    args.vars = cli_vars
    args.profile_dir = '/dev/null'
    return RuntimeConfig.from_parts(
        project=project,
@@ -88,3 +134,32 @@ def assert_fails_validation(self, dct, cls=None):

    with self.assertRaises(ValidationError):
        cls.from_dict(dct)


def generate_name_macros(package):
    from dbt.contracts.graph.parsed import ParsedMacro
    from dbt.node_types import NodeType
    name_sql = {}
    for component in ('database', 'schema', 'alias'):
        if component == 'alias':
            source = 'node.name'
        else:
            source = f'target.{component}'
        name = f'generate_{component}_name'
        sql = f'{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}'
        name_sql[name] = sql

    all_sql = '\n'.join(name_sql.values())
    for name, sql in name_sql.items():
        pm = ParsedMacro(
            name=name,
            resource_type=NodeType.Macro,
            unique_id=f'macro.{package}.{name}',
            package_name=package,
            original_file_path=normalize('macros/macro.sql'),
            root_path='./dbt_modules/root',
            path=normalize('macros/macro.sql'),
            raw_sql=all_sql,
            macro_sql=sql,
        )
        yield pm
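For reference, a small sketch (mine, not part of the PR) of the Jinja text this helper builds; for the schema component the f-string renders to:

# Sketch: the macro SQL generate_name_macros builds for one component.
component = 'schema'
source = f'target.{component}'
name = f'generate_{component}_name'
sql = f'{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}'
print(sql)
# {% macro generate_schema_name(value, node) %} {% if value %} {{ value }} {% else %} {{ target.schema }} {% endif %} {% endmacro %}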