
Commit

push code for 0.24 (#1168)
* Allow injection of sessions (#1111)
* show server info (#1118)
* Fix bug in exposing ExcelRequestOptions and test (#1123)
* Fix a few pylint errors (#1124)
* fix behavior when url has no protocol (#1125)
* Add permission control for Data Roles and Metrics (Issue #1063) (#1120)
* add option to pass specific datasources (#1150)
* allow user agent to be set by caller (#1166)
* Fix issues with connections publishing workbooks (#1171)
* Allow download to file-like objects (#1172)
* Add updated_at to JobItem class (#1182)
* fix revision references where xml returned does not match docs (#1176)
* Do not create empty connections list (#1178)
---------

Co-authored-by: Marwan Baghdad <mrwanbaghdad76@gmail.com>
Co-authored-by: jorwoods <jorwoods@users.noreply.github.com>
Co-authored-by: Brian Cantoni <bcantoni@salesforce.com>
Co-authored-by: TrimPeachu <77048868+TrimPeachu@users.noreply.github.com>
Co-authored-by: Stu Tomlinson <stu@nosnilmot.com>
Co-authored-by: Jeremy Harris <jercharris89@gmail.com>
7 people committed Feb 14, 2023
1 parent a29ba6c commit 514cc13
Showing 25 changed files with 439 additions and 226 deletions.
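As a quick orientation before the diff, here is a hedged sketch of how a couple of the 0.24 changes listed above look from the caller's side. The server URL, token, and site are placeholders, and the snippet assumes at least one datasource and one job exist on the site; it is illustrative, not taken from the repository's docs.

```python
import io
import tableauserverclient as TSC

# Placeholder connection details for illustration only.
server = TSC.Server("https://tableau.example.com", use_server_version=True)
auth = TSC.PersonalAccessTokenAuth("token-name", "token-secret", site_id="mysite")

with server.auth.sign_in(auth):
    # Download to file-like objects (#1172): stream a datasource into memory.
    datasources, _ = server.datasources.get()
    buffer = io.BytesIO()
    server.datasources.download(datasources[0].id, filepath=buffer)

    # updated_at on JobItem (#1182): inspect running or finished jobs.
    jobs, _ = server.jobs.get()
    for job in jobs:
        print(job.id, job.updated_at)
```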
2 changes: 1 addition & 1 deletion tableauserverclient/models/datasource_item.py
@@ -34,7 +34,7 @@ class AskDataEnablement:
Disabled = "Disabled"
SiteDefault = "SiteDefault"

def __init__(self, project_id: str, name: str = None) -> None:
def __init__(self, project_id: str, name: Optional[str] = None) -> None:
self._ask_data_enablement = None
self._certified = None
self._certification_note = None
10 changes: 9 additions & 1 deletion tableauserverclient/models/job_item.py
@@ -34,6 +34,7 @@ def __init__(
workbook_id: Optional[str] = None,
datasource_id: Optional[str] = None,
flow_run: Optional[FlowRunItem] = None,
updated_at: Optional["datetime.datetime"] = None,
):
self._id = id_
self._type = job_type
@@ -47,6 +48,7 @@ def __init__(
self._workbook_id = workbook_id
self._datasource_id = datasource_id
self._flow_run = flow_run
self._updated_at = updated_at

@property
def id(self) -> str:
@@ -113,9 +115,13 @@ def flow_run(self):
def flow_run(self, value):
self._flow_run = value

@property
def updated_at(self) -> Optional["datetime.datetime"]:
return self._updated_at

def __repr__(self):
return (
"<Job#{_id} {_type} created_at({_created_at}) started_at({_started_at}) completed_at({_completed_at})"
"<Job#{_id} {_type} created_at({_created_at}) started_at({_started_at}) updated_at({_updated_at}) completed_at({_completed_at})"
" progress ({_progress}) finish_code({_finish_code})>".format(**self.__dict__)
)

@@ -144,6 +150,7 @@ def _parse_element(cls, element, ns):
datasource = element.find(".//t:datasource[@id]", namespaces=ns)
datasource_id = datasource.get("id") if datasource is not None else None
flow_run = None
updated_at = parse_datetime(element.get("updatedAt", None))
for flow_job in element.findall(".//t:runFlowJobType", namespaces=ns):
flow_run = FlowRunItem()
flow_run._id = flow_job.get("flowRunId", None)
@@ -163,6 +170,7 @@ def _parse_element(cls, element, ns):
workbook_id,
datasource_id,
flow_run,
updated_at,
)


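A minimal usage sketch of the new `updated_at` field (a guess at typical usage, not taken from the diff itself). It assumes an already authenticated `server` object, a hypothetical project id, and a local `sales.hyper` file.

```python
import tableauserverclient as TSC

# Hypothetical project id and extract file; `server` is an authenticated TSC.Server.
datasource = TSC.DatasourceItem(project_id="1f2e3d4c-project-id")
job = server.datasources.publish(
    datasource, "sales.hyper", mode=TSC.Server.PublishMode.CreateNew, as_job=True
)

# Re-fetch the job to see when the server last touched it.
job = server.jobs.get_by_id(job.id)
print(job.created_at, job.updated_at, job.completed_at)
```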
8 changes: 4 additions & 4 deletions tableauserverclient/models/revision_item.py
@@ -67,10 +67,10 @@ def from_response(cls, resp: bytes, ns, resource_item) -> List["RevisionItem"]:
revision_item._resource_id = resource_item.id
revision_item._resource_name = resource_item.name
revision_item._revision_number = revision_xml.get("revisionNumber", None)
revision_item._current = string_to_bool(revision_xml.get("isCurrent", ""))
revision_item._deleted = string_to_bool(revision_xml.get("isDeleted", ""))
revision_item._created_at = parse_datetime(revision_xml.get("createdAt", None))
for user in revision_xml.findall(".//t:user", namespaces=ns):
revision_item._current = string_to_bool(revision_xml.get("current", ""))
revision_item._deleted = string_to_bool(revision_xml.get("deleted", ""))
revision_item._created_at = parse_datetime(revision_xml.get("publishedAt", None))
for user in revision_xml.findall(".//t:publisher", namespaces=ns):
revision_item._user_id = user.get("id", None)
revision_item._user_name = user.get("name", None)

6 changes: 3 additions & 3 deletions tableauserverclient/models/site_item.py
@@ -50,9 +50,9 @@ def __init__(
self,
name: str,
content_url: str,
admin_mode: str = None,
user_quota: int = None,
storage_quota: int = None,
admin_mode: Optional[str] = None,
user_quota: Optional[int] = None,
storage_quota: Optional[int] = None,
disable_subscriptions: bool = False,
subscribe_others_enabled: bool = True,
revision_history_enabled: bool = False,
2 changes: 1 addition & 1 deletion tableauserverclient/models/workbook_item.py
@@ -33,7 +33,7 @@


class WorkbookItem(object):
def __init__(self, project_id: str, name: str = None, show_tabs: bool = False) -> None:
def __init__(self, project_id: str, name: Optional[str] = None, show_tabs: bool = False) -> None:
self._connections = None
self._content_url = None
self._webpage_url = None
97 changes: 32 additions & 65 deletions tableauserverclient/server/endpoint/datasources_endpoint.py
@@ -31,22 +31,9 @@
)
from ...models import ConnectionCredentials, RevisionItem
from ...models.job_item import JobItem
from ...models import ConnectionCredentials

io_types = (io.BytesIO, io.BufferedReader)

from pathlib import Path
from typing import (
List,
Mapping,
Optional,
Sequence,
Tuple,
TYPE_CHECKING,
Union,
)

io_types = (io.BytesIO, io.BufferedReader)
io_types_r = (io.BytesIO, io.BufferedReader)
io_types_w = (io.BytesIO, io.BufferedWriter)

# The maximum size of a file that can be published in a single request is 64MB
FILESIZE_LIMIT = 1024 * 1024 * 64 # 64MB
@@ -61,8 +48,10 @@
from .schedules_endpoint import AddResponse

FilePath = Union[str, os.PathLike]
FileObject = Union[io.BufferedReader, io.BytesIO]
PathOrFile = Union[FilePath, FileObject]
FileObjectR = Union[io.BufferedReader, io.BytesIO]
FileObjectW = Union[io.BufferedWriter, io.BytesIO]
PathOrFileR = Union[FilePath, FileObjectR]
PathOrFileW = Union[FilePath, FileObjectW]


class Datasources(QuerysetEndpoint):
Expand All @@ -80,7 +69,7 @@ def baseurl(self) -> str:

# Get all datasources
@api(version="2.0")
def get(self, req_options: RequestOptions = None) -> Tuple[List[DatasourceItem], PaginationItem]:
def get(self, req_options: Optional[RequestOptions] = None) -> Tuple[List[DatasourceItem], PaginationItem]:
logger.info("Querying all datasources on site")
url = self.baseurl
server_response = self.get_request(url, req_options)
@@ -135,39 +124,11 @@ def delete(self, datasource_id: str) -> None:
def download(
self,
datasource_id: str,
filepath: FilePath = None,
filepath: Optional[PathOrFileW] = None,
include_extract: bool = True,
no_extract: Optional[bool] = None,
) -> str:
if not datasource_id:
error = "Datasource ID undefined."
raise ValueError(error)
url = "{0}/{1}/content".format(self.baseurl, datasource_id)

if no_extract is False or no_extract is True:
import warnings

warnings.warn(
"no_extract is deprecated, use include_extract instead.",
DeprecationWarning,
)
include_extract = not no_extract

if not include_extract:
url += "?includeExtract=False"

with closing(self.get_request(url, parameters={"stream": True})) as server_response:
_, params = cgi.parse_header(server_response.headers["Content-Disposition"])
filename = to_filename(os.path.basename(params["filename"]))

download_path = make_download_path(filepath, filename)

with open(download_path, "wb") as f:
for chunk in server_response.iter_content(1024): # 1KB
f.write(chunk)

logger.info("Downloaded datasource to {0} (ID: {1})".format(download_path, datasource_id))
return os.path.abspath(download_path)
return self.download_revision(datasource_id, None, filepath, include_extract, no_extract)

# Update datasource
@api(version="2.0")
@@ -232,10 +193,10 @@ def delete_extract(self, datasource_item: DatasourceItem) -> None:
def publish(
self,
datasource_item: DatasourceItem,
file: PathOrFile,
file: PathOrFileR,
mode: str,
connection_credentials: ConnectionCredentials = None,
connections: Sequence[ConnectionItem] = None,
connection_credentials: Optional[ConnectionCredentials] = None,
connections: Optional[Sequence[ConnectionItem]] = None,
as_job: bool = False,
) -> Union[DatasourceItem, JobItem]:

@@ -255,8 +216,7 @@ def publish(
error = "Only {} files can be published as datasources.".format(", ".join(ALLOWED_FILE_EXTENSIONS))
raise ValueError(error)

elif isinstance(file, io_types):

elif isinstance(file, io_types_r):
if not datasource_item.name:
error = "Datasource item must have a name when passing a file object"
raise ValueError(error)
@@ -302,7 +262,7 @@ def publish(
if isinstance(file, (Path, str)):
with open(file, "rb") as f:
file_contents = f.read()
elif isinstance(file, io_types):
elif isinstance(file, io_types_r):
file_contents = file.read()
else:
raise TypeError("file should be a filepath or file object.")
@@ -433,14 +393,17 @@ def download_revision(
self,
datasource_id: str,
revision_number: str,
filepath: Optional[PathOrFile] = None,
filepath: Optional[PathOrFileW] = None,
include_extract: bool = True,
no_extract: Optional[bool] = None,
) -> str:
) -> PathOrFileW:
if not datasource_id:
error = "Datasource ID undefined."
raise ValueError(error)
url = "{0}/{1}/revisions/{2}/content".format(self.baseurl, datasource_id, revision_number)
if revision_number is None:
url = "{0}/{1}/content".format(self.baseurl, datasource_id)
else:
url = "{0}/{1}/revisions/{2}/content".format(self.baseurl, datasource_id, revision_number)
if no_extract is False or no_extract is True:
import warnings

@@ -455,18 +418,22 @@

with closing(self.get_request(url, parameters={"stream": True})) as server_response:
_, params = cgi.parse_header(server_response.headers["Content-Disposition"])
filename = to_filename(os.path.basename(params["filename"]))

download_path = make_download_path(filepath, filename)

with open(download_path, "wb") as f:
if isinstance(filepath, io_types_w):
for chunk in server_response.iter_content(1024): # 1KB
f.write(chunk)
filepath.write(chunk)
return_path = filepath
else:
filename = to_filename(os.path.basename(params["filename"]))
download_path = make_download_path(filepath, filename)
with open(download_path, "wb") as f:
for chunk in server_response.iter_content(1024): # 1KB
f.write(chunk)
return_path = os.path.abspath(download_path)

logger.info(
"Downloaded datasource revision {0} to {1} (ID: {2})".format(revision_number, download_path, datasource_id)
"Downloaded datasource revision {0} to {1} (ID: {2})".format(revision_number, return_path, datasource_id)
)
return os.path.abspath(download_path)
return return_path

@api(version="2.3")
def delete_revision(self, datasource_id: str, revision_number: str) -> None:
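A sketch of the reworked download path, assuming an authenticated `server` and a hypothetical datasource LUID: `download()` now delegates to `download_revision()` with `revision_number=None`, and both accept a writable binary file object in place of a path.

```python
import io

# Hypothetical datasource id for illustration.
datasource_id = "2dd1c2b3-f4e5-6789-abcd-0123456789ab"

# Download the current content into memory rather than onto disk.
buffer = io.BytesIO()
server.datasources.download(datasource_id, filepath=buffer, include_extract=False)
buffer.seek(0)  # rewind before handing the .tdsx bytes to something else

# Or fetch a specific revision to disk as before; the absolute path is returned.
path = server.datasources.download_revision(datasource_id, "3")
```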
54 changes: 35 additions & 19 deletions tableauserverclient/server/endpoint/endpoint.py
@@ -3,7 +3,7 @@
from packaging.version import Version
from functools import wraps
from xml.etree.ElementTree import ParseError
from typing import Any, Callable, Dict, Optional, TYPE_CHECKING
from typing import Any, Callable, Dict, Optional, TYPE_CHECKING, Mapping

from .exceptions import (
ServerResponseError,
@@ -35,15 +35,35 @@ def __init__(self, parent_srv: "Server"):
self.parent_srv = parent_srv

@staticmethod
def _make_common_headers(auth_token, content_type):
_client_version: Optional[str] = get_versions()["version"]
headers = {}
def set_parameters(http_options, auth_token, content, content_type, parameters) -> Dict[str, Any]:
parameters = parameters or {}
parameters.update(http_options)
if "headers" not in parameters:
parameters["headers"] = {}

if auth_token is not None:
headers[TABLEAU_AUTH_HEADER] = auth_token
parameters["headers"][TABLEAU_AUTH_HEADER] = auth_token
if content_type is not None:
headers[CONTENT_TYPE_HEADER] = content_type
headers[USER_AGENT_HEADER] = "Tableau Server Client/{}".format(_client_version)
return headers
parameters["headers"][CONTENT_TYPE_HEADER] = content_type

Endpoint.set_user_agent(parameters)
if content is not None:
parameters["data"] = content
return parameters or {}

@staticmethod
def set_user_agent(parameters):
if USER_AGENT_HEADER not in parameters["headers"]:
if USER_AGENT_HEADER in parameters:
parameters["headers"][USER_AGENT_HEADER] = parameters[USER_AGENT_HEADER]
else:
# only set the TSC user agent if not already populated
_client_version: Optional[str] = get_versions()["version"]
parameters["headers"][USER_AGENT_HEADER] = "Tableau Server Client/{}".format(_client_version)

# result: parameters["headers"]["User-Agent"] is set
# return explicitly for testing only
return parameters

def _make_request(
self,
@@ -54,18 +74,14 @@ def _make_request(
content_type: Optional[str] = None,
parameters: Optional[Dict[str, Any]] = None,
) -> "Response":
parameters = parameters or {}
if "headers" not in parameters:
parameters["headers"] = {}
parameters.update(self.parent_srv.http_options)
parameters["headers"].update(Endpoint._make_common_headers(auth_token, content_type))

if content is not None:
parameters["data"] = content
parameters = Endpoint.set_parameters(
self.parent_srv.http_options, auth_token, content, content_type, parameters
)

logger.debug("request {}, url: {}".format(method.__name__, url))
logger.debug("request {}, url: {}".format(method, url))
if content:
logger.debug("request content: {}".format(helpers.strings.redact_xml(content[:1000])))
redacted = helpers.strings.redact_xml(content[:1000])
logger.debug("request content: {}".format(redacted))

server_response = method(url, **parameters)
self._check_status(server_response, url)
Expand All @@ -78,7 +94,7 @@ def _make_request(

return server_response

def _check_status(self, server_response, url: str = None):
def _check_status(self, server_response, url: Optional[str] = None):
if server_response.status_code >= 500:
raise InternalServerError(server_response, url)
elif server_response.status_code not in Success_codes:
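A sketch of the caller-supplied User-Agent behaviour enabled by the refactor above (#1166); the URL and agent string are placeholders. `set_user_agent()` only fills in the default `Tableau Server Client/<version>` header when the caller has not already supplied one through the server's HTTP options.

```python
import tableauserverclient as TSC

server = TSC.Server("https://tableau.example.com")

# Headers passed through http_options are merged into every request; because
# "User-Agent" is already present, TSC keeps it instead of its own default.
server.add_http_options({"headers": {"User-Agent": "my-integration/2.1"}})
```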
3 changes: 2 additions & 1 deletion tableauserverclient/server/endpoint/exceptions.py
@@ -1,4 +1,5 @@
from defusedxml.ElementTree import fromstring
from typing import Optional


class TableauError(Exception):
@@ -33,7 +34,7 @@ def from_response(cls, resp, ns, url=None):


class InternalServerError(TableauError):
def __init__(self, server_response, request_url: str = None):
def __init__(self, server_response, request_url: Optional[str] = None):
self.code = server_response.status_code
self.content = server_response.content
self.url = request_url or "server"
