Commit: misc fixes
ZainRizvi committed May 3, 2023
1 parent e6850ae commit cde3689
Showing 6 changed files with 109 additions and 4 deletions.
6 changes: 6 additions & 0 deletions .github/scripts/pytest_cache.py
@@ -26,6 +26,12 @@ def main():
    # TODO: First check if it's even worth uploading a new cache:
    # Does the cache even mark any failed tests?

    # Verify the cache dir exists before doing any work
    if not os.path.exists(args.cache_dir):
        raise ValueError(f"The given pytest cache dir `{args.cache_dir}` does not exist")

    print(os.getenv("AWS_ACCESS_KEY_ID"))
    upload_pytest_cache(
        pr_identifier=args.pr_identifier,
        workflow=args.workflow,
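This guard turns what would otherwise be a confusing failure deep in the zip/S3 code into an immediate, descriptive error. A minimal sketch of the same fail-fast pattern (the flag name and argparse wiring here are assumptions; the script's real argument setup is outside this hunk):

    import argparse
    import os

    parser = argparse.ArgumentParser(description="Upload the pytest cache to S3")
    # `--cache_dir` is a hypothetical flag name for illustration
    parser.add_argument("--cache_dir", required=True, help="Path to the .pytest_cache folder")
    args = parser.parse_args()

    # Validate inputs up front so a bad path fails here, not mid-upload
    if not os.path.exists(args.cache_dir):
        raise ValueError(f"The given pytest cache dir `{args.cache_dir}` does not exist")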
18 changes: 16 additions & 2 deletions .github/scripts/pytest_caching_utils.py
@@ -1,11 +1,12 @@
import os
import shutil
import contextlib

from s3_upload_utils import *

PYTEST_CACHE_KEY_PREFIX = "pytest_cache"
PYTEST_CACHE_DIR_NAME = ".pytest_cache"
BUCKET = "pytest_cache"
BUCKET = "pytest-cache"
TEMP_DIR = "/tmp" # a backup location in case one isn't provided

def get_sanitized_pr_identifier(pr_identifier):
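The `pytest_cache` → `pytest-cache` rename above is more than cosmetic: S3 bucket names may only contain lowercase letters, numbers, hyphens, and dots, so a name with an underscore is not a valid bucket name at all.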
@@ -55,8 +56,15 @@ def upload_pytest_cache(pr_identifier, workflow, job, shard, cache_dir, bucket=B
    pr_identifier: A unique, human-readable identifier for the PR
    job: The name of the job that is uploading the cache
    """

    if not bucket:
        bucket = BUCKET
    if not temp_dir:
        temp_dir = TEMP_DIR

    obj_key_prefix = get_s3_key_prefix(pr_identifier, workflow, job, shard)
    zip_file_path_base = f"{temp_dir}/zip-upload/{obj_key_prefix}"  # doesn't include the extension
    zip_file_path = ""

    try:
        zip_file_path = zip_folder(cache_dir, zip_file_path_base)
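Note that the `bucket=BUCKET` keyword default alone would not cover callers that explicitly pass `None` or an empty string (for example, when forwarding an unset environment variable), which is what the new `if not bucket` guard is for. A small self-contained illustration of the difference:

    DEFAULT_BUCKET = "pytest-cache"

    def upload(bucket=DEFAULT_BUCKET):
        # Normalize falsy values: a forwarded-but-unset setting arrives as ""
        # or None, and the keyword default never kicks in for those.
        if not bucket:
            bucket = DEFAULT_BUCKET
        return bucket

    assert upload() == "pytest-cache"             # keyword default applies
    assert upload(bucket="") == "pytest-cache"    # guard catches the empty string
    assert upload(bucket=None) == "pytest-cache"  # ...and an explicit None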
@@ -65,10 +73,16 @@ def upload_pytest_cache(pr_identifier, workflow, job, shard, cache_dir, bucket=B
    finally:
        if zip_file_path:
            print(f"Deleting {zip_file_path}")
-            os.remove(zip_file_path)
+            with contextlib.suppress(FileNotFoundError):
+                os.remove(zip_file_path)

def download_pytest_cache(pr_identifier, workflow, job, dest_cache_dir, bucket=BUCKET, temp_dir=TEMP_DIR):

    if not bucket:
        bucket = BUCKET
    if not temp_dir:
        temp_dir = TEMP_DIR

    obj_key_prefix = get_s3_key_prefix(pr_identifier, workflow, job)

    zip_download_dir = f"{temp_dir}/cache-zip-downloads/{obj_key_prefix}"
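The `contextlib.suppress(FileNotFoundError)` wrapper used in the cleanup above is the stdlib idiom for ignoring one specific expected exception; the two forms below behave identically:

    import contextlib
    import os

    zip_file_path = "/tmp/zip-upload/example.zip"  # illustrative path

    # Before: remove() raises FileNotFoundError if the zip was never created
    try:
        os.remove(zip_file_path)
    except FileNotFoundError:
        pass

    # After: same behavior, without the boilerplate
    with contextlib.suppress(FileNotFoundError):
        os.remove(zip_file_path)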
84 changes: 84 additions & 0 deletions .github/scripts/pytest_gen_test_data.py
@@ -0,0 +1,84 @@
import os
import random
import shutil

from s3_upload_utils import *

PYTEST_CACHE_KEY_PREFIX = "pytest_cache"
PYTEST_CACHE_DIR_NAME = ".pytest_cache"
BUCKET = "pytest_cache"
TEMP_DIR = "/tmp"  # a fallback location in case one isn't provided


def create_test_files_in_folder(pytest_cache_dir):

    # Delete anything currently in pytest_cache_dir
    if os.path.exists(pytest_cache_dir):
        shutil.rmtree(pytest_cache_dir)

    # Make sure the cache subfolder exists
    subdir = f"{pytest_cache_dir}/v/cache/"
    ensure_dir_exists(subdir)

    failed_tests_file = os.path.join(subdir, "lastfailed")
    failed_tests = {}

    # Random id (100 to 300) so each generated dataset is distinguishable
    data_id = random.randint(100, 300)

    for test_num in range(10):  # number of tests to fail
        test_name = f"test_id_{data_id}__fails_num_{test_num}"
        failed_tests[test_name] = True

    write_json_file(failed_tests_file, failed_tests)
    print(f"Created file {failed_tests_file}")

    other_cache_files = [
        "v/cache/file1",
        "v/cache/file2",
        "v/cache/file3",
        "v/cache/file4",
        "randofile1",
        "randofile2",
        "randofile3",
        "randofile4",
        "randofile5",
    ]

    # These files are irrelevant to the cache logic; generate roughly half of
    # them at random to simulate the extra noise a real .pytest_cache contains
    for file_name in other_cache_files:
        if random.randint(0, len(other_cache_files)) >= (len(other_cache_files) // 2):
            continue

        file_path = os.path.join(pytest_cache_dir, file_name)
        with open(file_path, "w") as f:
            f.write(f"This is a test file from generation {data_id}")
        print(f"Created file {file_path}")

if __name__ == "__main__":
    folder = "/Users/zainr/deleteme/test-files"
    subfolder = f"{folder}/fake_pytest_cache"
    create_test_files_in_folder(subfolder)
    create_test_files_in_folder(subfolder + "2")


# download_s3_objects_with_prefix(BUCKET, "zipped_file/ff", "downloaded_files")

# unzip_folder("downloaded_files/zipped_file/ffzsome-job-btest-files.zip", "/Users/zainr/deleteme/ffunzip")

# pr_identifier = get_sanitized_pr_identifier("read-deal")
# print(pr_identifier)
# workflow = "test-workflow"
# job = "test-job-name"
# shard = "shard-3"
# cache_dir = f"/Users/zainr/test-infra/{PYTEST_CACHE_DIR_NAME}"
# upload_pytest_cache(pr_identifier, workflow, job, shard, cache_dir, BUCKET)

# temp_dir = "/Users/zainr/deleteme/tmp"

# cache_dir_new = f"/Users/zainr/deleteme/test_pytest_cache"
# download_pytest_cache(pr_identifier, workflow, job, cache_dir_new, BUCKET)
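For reference, pytest's cache plugin stores the IDs of previously failing tests as a JSON object in `v/cache/lastfailed`, which is exactly the file this generator fabricates. A quick way to inspect what was written (reusing the hypothetical path from the `__main__` block above):

    import json

    lastfailed = "/Users/zainr/deleteme/test-files/fake_pytest_cache/v/cache/lastfailed"

    with open(lastfailed) as f:
        failed = json.load(f)

    for test_id in failed:
        print(test_id)  # e.g. test_id_217__fails_num_3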

2 changes: 1 addition & 1 deletion .github/scripts/s3_upload_utils.py
@@ -12,7 +12,7 @@ def zip_folder(folder_to_zip, dest_file_base_name):
    if dest_file_base_name.endswith(".zip"):
        dest_file_base_name = dest_file_base_name[:-4]

-    ensure_dir_exists(dest_file_base_name)
+    ensure_dir_exists(os.path.dirname(dest_file_base_name))

print(f"Zipping {folder_to_zip} to {dest_file_base_name}")
return shutil.make_archive(dest_file_base_name, 'zip', folder_to_zip)
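The one-line fix above matters because `shutil.make_archive(base_name, ...)` writes its output to `base_name` plus the archive extension; the old call created a directory at the base name itself, leaving a stray empty folder next to every zip, when only the parent directory actually needs to exist. A runnable sketch of the corrected behavior (paths are illustrative):

    import os
    import shutil

    src = "/tmp/demo-src"
    os.makedirs(src, exist_ok=True)              # something to archive

    base = "/tmp/zip-upload/pr-123/shard-1"      # no extension, like zip_file_path_base
    os.makedirs(os.path.dirname(base), exist_ok=True)  # create the parent only

    archive = shutil.make_archive(base, "zip", src)
    print(archive)  # /tmp/zip-upload/pr-123/shard-1.zip — the extension is appended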
2 changes: 1 addition & 1 deletion .github/workflows/_linux-test.yml
@@ -223,7 +223,7 @@ jobs:
        uses: ./.github/actions/pytest-cache-upload
        if: always() && steps.test.conclusion && steps.test.conclusion != 'skipped'
        with:
-          cache_dir: "${GITHUB_WORKSPACE}/.pytest_cache"
+          cache_dir: ${{ env.GITHUB_WORKSPACE }}/.pytest_cache
          shard: ${{ matrix.shard }}

      - name: Print remaining test logs
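This workflow fix is needed because `${GITHUB_WORKSPACE}` is shell syntax, which is only expanded inside `run:` steps; under `with:` the action previously received the literal, unexpanded string. `${{ env.GITHUB_WORKSPACE }}` is a GitHub Actions expression, so it is evaluated before the input is handed to the action.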
1 change: 1 addition & 0 deletions .gitignore
@@ -324,6 +324,7 @@ bazel-*
xla/

# direnv, posh-direnv
.env
.envrc
.psenvrc

Expand Down
