From f29af8dfc3a4b4bf6dca91bba9b2b560512d5044 Mon Sep 17 00:00:00 2001
From: Bryant Howell
Date: Mon, 3 Dec 2018 16:02:52 +0000
Subject: [PATCH] 4.7.2 fixes an issue with detection of published datasources

---
 examples/replicate_site_structure_sample.py | 24 +++++++++++++--------
 setup.py                                    |  2 +-
 tableau_documents/tableau_datasource.py     | 19 ++++++++++------
 3 files changed, 29 insertions(+), 16 deletions(-)

diff --git a/examples/replicate_site_structure_sample.py b/examples/replicate_site_structure_sample.py
index 1165ff0..ecc0c38 100644
--- a/examples/replicate_site_structure_sample.py
+++ b/examples/replicate_site_structure_sample.py
@@ -359,18 +359,25 @@ def __init__(self, orig_content_url):
 
     ds_dses = ds_obj.tableau_document.datasources  # type: list[TableauDatasource]
     # You may need to change details of the data source connections here
    # Uncomment below if you have things to change
-    # for ds_ds in ds_dses:
-    #    for conn in ds_ds.connections:
+    credentials = None
+    for ds_ds in ds_dses:
+        for conn in ds_ds.connections:
             # Change the dbname is most common
-            # conn.dbname = u'prod'
-            # conn.port = u'10000'
+            # Credential mapping example, could be much more full
+            if conn.server.find(u'servername') != -1:
+                credentials = {u'username': u'uname', u'password': u'pword'}
 
     new_ds_filename = ds_obj.save_new_file(u'Updated Datasource')
     # Here is also where any credential mapping would need to happen, because credentials can't be retrieved
-
-    orig_ds_content_url[ds].new_luid = n.publish_datasource(new_ds_filename, orig_ds_content_url[ds].orig_name,
-                                                            proj_obj, overwrite=True)
+    if credentials is not None:
+        orig_ds_content_url[ds].new_luid = n.publish_datasource(new_ds_filename, orig_ds_content_url[ds].orig_name,
+                                                                proj_obj, overwrite=True,
+                                                                connection_username=credentials[u'username'],
+                                                                connection_password=credentials[u'password'])
+    else:
+        orig_ds_content_url[ds].new_luid = n.publish_datasource(new_ds_filename, orig_ds_content_url[ds].orig_name,
+                                                                proj_obj, overwrite=True)
     print(u'Published data source, resulting in new luid {}'.format(orig_ds_content_url[ds].new_luid))
 
     # Add to an Extract Schedule if one was scheduled
@@ -475,7 +482,7 @@ def __init__(self, orig_content_url):
         print(u'Published data source, resulting in new luid {}'.format(new_ds_luid))
 
         # Add to an Extract Schedule if one was scheduled
-        if ds_luid in ds_extract_tasks:
+        if o_ds_luid in ds_extract_tasks:
             n.add_datasource_to_schedule(ds_name_or_luid=new_ds_luid,
                                          schedule_name_or_luid=ds_extract_tasks[o_ds_luid])
 
@@ -517,4 +524,3 @@ def __init__(self, orig_content_url):
 # No way to create an equivalent data driven alert, because while you can get the existence of an Alert from the
 # originating site, there is no method to ADD a data driven alert to the new site, only to Add a User to an
 # existing Data Driven Alert
-# existing Data Driven Alert
\ No newline at end of file
diff --git a/setup.py b/setup.py
index e9d93a5..644d8d9 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name='tableau_tools',
-    version='4.7.1',
+    version='4.7.2',
     packages=['tableau_tools', 'tableau_tools.tableau_rest_api', 'tableau_tools.tableau_documents', 'tableau_tools.examples'],
     url='https://github.com/bryantbhowell/tableau_tools',
     license='',
diff --git a/tableau_documents/tableau_datasource.py b/tableau_documents/tableau_datasource.py
index b87ee9e..46c7a84 100644
--- a/tableau_documents/tableau_datasource.py
+++ b/tableau_documents/tableau_datasource.py
@@ -101,6 +101,10 @@ def __init__(self, datasource_xml=None, logger_obj=None, ds_version=None):
                 self.log(u'connection tags found, building a TableauConnection object')
                 new_conn = TableauConnection(connection_xml_obj)
                 self.connections.append(new_conn)
+                if new_conn.connection_type == u'sqlproxy':
+                    self._published = True
+                    repository_location_xml = self.xml.find(u'repository-location')
+                    self.repository_location = repository_location_xml
 
         # Grab the relation
         elif self.ds_version_type in [u'10', u'10.5']:
@@ -113,6 +117,9 @@ def __init__(self, datasource_xml=None, logger_obj=None, ds_version=None):
             for published_datasource in published_datasources:
                 self.log(u'Published Datasource connection tags found, building a TableauConnection object')
                 self.connections.append(TableauConnection(published_datasource))
+                self._published = True
+                repository_location_xml = self.xml.find(u'repository-location')
+                self.repository_location = repository_location_xml
 
         # Skip the relation if it is a Parameters datasource. Eventually, build out separate object
         if self.xml.get(u'name') != u'Parameters':
@@ -122,13 +129,13 @@ def __init__(self, datasource_xml=None, logger_obj=None, ds_version=None):
 
            self.log(u'Found a Parameters datasource')
 
-        self.repository_location = None
+        #self.repository_location = None
 
-        if self.xml.find(u'repository-location') is not None:
-            if len(self.xml.find(u'repository-location')) == 0:
-                self._published = True
-                repository_location_xml = self.xml.find(u'repository-location')
-                self.repository_location = repository_location_xml
+        #if self.xml.find(u'repository-location') is not None:
+        #    if len(self.xml.find(u'repository-location')) == 0:
+        #        self._published = True
+        #        repository_location_xml = self.xml.find(u'repository-location')
+        #        self.repository_location = repository_location_xml
 
         # Grab the extract filename if there is an extract section
         if self.xml.find(u'extract') is not None:
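
Note on the credential-mapping block added to examples/replicate_site_structure_sample.py: the patch hard-codes a single server substring and one username/password pair. Below is a minimal sketch of a fuller mapping, assuming a hypothetical credential_map dictionary and map_credentials() helper; the hostnames and credential values are placeholders, while conn.server, the substring match via find(), and the connection_username / connection_password arguments to publish_datasource() are the pieces taken from the patch itself.

# Hypothetical mapping of server-hostname substrings to credentials.
# The keys and values here are placeholders, not values from the patch.
credential_map = {
    u'prod-db': {u'username': u'prod_user', u'password': u'prod_pw'},
    u'warehouse': {u'username': u'wh_user', u'password': u'wh_pw'}
}


def map_credentials(datasources, server_credential_map):
    # Walk every connection in every datasource and return the first matching
    # credentials, using the same substring match on conn.server as the patch
    for ds_ds in datasources:
        for conn in ds_ds.connections:
            if conn.server is None:
                # File-based connections may have no server attribute value
                continue
            for server_substring in server_credential_map:
                if conn.server.find(server_substring) != -1:
                    return server_credential_map[server_substring]
    return None


# Usage inside the replication loop of the example script (ds_obj, n, proj_obj,
# orig_ds_content_url and ds come from that surrounding script):
# credentials = map_credentials(ds_obj.tableau_document.datasources, credential_map)
# new_ds_filename = ds_obj.save_new_file(u'Updated Datasource')
# if credentials is not None:
#     new_luid = n.publish_datasource(new_ds_filename, orig_ds_content_url[ds].orig_name, proj_obj,
#                                     overwrite=True,
#                                     connection_username=credentials[u'username'],
#                                     connection_password=credentials[u'password'])
# else:
#     new_luid = n.publish_datasource(new_ds_filename, orig_ds_content_url[ds].orig_name, proj_obj,
#                                     overwrite=True)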