Skip to content

Commit

Permalink
Initial GCS S3 support
Browse files Browse the repository at this point in the history
  • Loading branch information
shnela committed Oct 21, 2022
1 parent 9a0e977 commit 13365ef
Showing 1 changed file with 15 additions and 2 deletions.
17 changes: 15 additions & 2 deletions src/neptune/new/internal/artifacts/drivers/s3.py
Expand Up @@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pathlib
import typing
from datetime import datetime
Expand All @@ -36,6 +37,18 @@
class S3ArtifactDriver(ArtifactDriver):
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"

@staticmethod
def get_boto_resource():
    """Build a boto3 S3 resource, honoring optional ``S3_*`` environment overrides.

    Each of ``S3_ACCESS_KEY_ID``, ``S3_SECRET_ACCESS_KEY`` and
    ``S3_ENDPOINT_URL`` defaults to ``None`` when unset, in which case
    boto3 falls back to its standard credential/endpoint resolution
    (e.g. AWS config files, instance metadata). A custom endpoint allows
    S3-compatible backends (e.g. GCS interoperability mode).
    """
    return boto3.resource(
        service_name="s3",
        aws_access_key_id=os.environ.get("S3_ACCESS_KEY_ID"),
        aws_secret_access_key=os.environ.get("S3_SECRET_ACCESS_KEY"),
        endpoint_url=os.environ.get("S3_ENDPOINT_URL"),
    )

@staticmethod
def get_type() -> str:
    """Return the artifact-file-type identifier handled by this driver."""
    driver_type = ArtifactFileType.S3
    return driver_type.value
Expand Down Expand Up @@ -69,7 +82,7 @@ def get_tracked_files(cls, path: str, destination: str = None) -> typing.List[Ar
)

# pylint: disable=no-member
remote_storage = boto3.resource("s3").Bucket(bucket_name)
remote_storage = cls.get_boto_resource().Bucket(bucket_name)

stored_files: typing.List[ArtifactFileData] = list()

Expand Down Expand Up @@ -115,7 +128,7 @@ def download_file(cls, destination: pathlib.Path, file_definition: ArtifactFileD
url = urlparse(location)
bucket_name, path = url.netloc, url.path.lstrip("/")

remote_storage = boto3.resource("s3")
remote_storage = cls.get_boto_resource()
try:
# pylint: disable=no-member
bucket = remote_storage.Bucket(bucket_name)
Expand Down

0 comments on commit 13365ef

Please sign in to comment.