From 734bf1c181cbe87f70367b96e112d8399695fa2e Mon Sep 17 00:00:00 2001
From: EC2 Default User
Date: Wed, 22 Oct 2025 20:36:29 +0000
Subject: [PATCH 1/5] set up a proof of concept

---
 docs/scripts/uploadBenchmark.py | 64 +++++++++++++++++++++++++++++----
 1 file changed, 57 insertions(+), 7 deletions(-)

diff --git a/docs/scripts/uploadBenchmark.py b/docs/scripts/uploadBenchmark.py
index 8a6205b5d..7d0ffca16 100644
--- a/docs/scripts/uploadBenchmark.py
+++ b/docs/scripts/uploadBenchmark.py
@@ -44,11 +44,12 @@
 # trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
 tracer = trace.get_tracer("my_tracer")

-PARENT_PROJECT = "syn$FILL_ME_IN"
+PARENT_PROJECT = "syn70749126"
 S3_BUCKET = "s3://$FILL_ME_IN"
 S3_PROFILE = "$FILL_ME_IN"

 MiB: int = 2**20
+GiB: int = 2**30


 def create_folder_structure(
@@ -342,6 +343,39 @@ def execute_sync_to_s3(
     )  # nosec
     print(f"\nTime to S3 sync: {perf_counter() - time_before_sync}")

+async def upload_multi_files_under_folder(path: str):
+    time_before_uploading_files = perf_counter()
+    # Create a project
+    root_project = Project(id=PARENT_PROJECT)
+    files = []
+    for directory_path, directory_names, file_names in os.walk(path):
+        # Replicate the files on Synapse
+        for filename in file_names:
+            filepath = os.path.join(directory_path, filename)
+            file = File(
+                path=filepath,
+            )
+            files.append(file)
+
+    # Set the files attribute directly
+    root_project.files = files
+
+    # Store the project with all files
+    stored_project = await root_project.store_async()
+    return stored_project
+
+def execute_file_upload_test(path: str, test_name: str) -> None:
+    """Executes the file upload test.
+
+    :param path: The path to the root directory
+    :param test_name: The name of the test to add to the span name
+    """
+    with tracer.start_as_current_span(f"file_upload__{test_name}"):
+        asyncio.run(upload_multi_files_under_folder(path))
+
+    print(
+        f"\nTime to upload multiple files: {perf_counter() - time_before_uploading_files}"
+    )

 def execute_test_suite(
     path: str,
@@ -361,7 +395,7 @@ def execute_test_suite(
     # Cleanup can be changed to delete_local=True when we want to clear the files out
     # This can be kept as False to allow multiple tests with the same file/folder
     # structure to re-use the files on Disk.
-    cleanup(path=path, delete_synapse=True, delete_s3=True, delete_local=False)
+    # cleanup(path=path, delete_synapse=True, delete_s3=True, delete_local=False)
     _, total_files, _ = create_folder_structure(
         path=path,
         depth_of_directory_tree=depth_of_directory_tree,
@@ -370,6 +404,7 @@ def execute_test_suite(
         total_size_of_files_mib=total_size_of_files_mib,
     )
     test_name = f"{total_files}_files_{total_size_of_files_mib}MiB"
+    execute_file_upload_test(path, test_name)

     # execute_synapseutils_test(path, test_name)

@@ -380,17 +415,17 @@ def execute_test_suite(

     # execute_sync_to_s3(path, test_name)

-syn = synapseclient.Synapse(debug=False)
+syn = synapseclient.Synapse(debug=False, http_timeout_seconds=700)
 root_path = os.path.expanduser("~/benchmarking")
 # Log-in with ~.synapseConfig `authToken`
 syn.login()

-print("25 Files - 25MiB")
-# 25 Files - 25MiB -----------------------------------------------------------------------
+print("1 File - 10GiB")
+# 1 File - 10GiB -----------------------------------------------------------------------
 depth = 1
 sub_directories = 1
-files_per_directory = 25
-size_mib = 25
+files_per_directory = 1
+size_mib = 10 * 1024

 execute_test_suite(
     path=root_path,
@@ -400,6 +435,21 @@ def execute_test_suite(
     total_size_of_files_mib=size_mib,
 )

+# print("25 Files - 25MiB")
+# 25 Files - 25MiB -----------------------------------------------------------------------
+# depth = 1
+# sub_directories = 1
+# files_per_directory = 25
+# size_mib = 25
+
+# execute_test_suite(
+#     path=root_path,
+#     depth_of_directory_tree=depth,
+#     num_sub_directories=sub_directories,
+#     num_files_per_directory=files_per_directory,
+#     total_size_of_files_mib=size_mib,
+# )
+
 # print("1 Files - 10MiB")
 # ## 1 Files - 10MiB -----------------------------------------------------------------------
 # depth = 1
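Reviewer note on PATCH 1/5: as committed, time_before_uploading_files is local to the upload_multi_files_under_folder coroutine, so the print at the end of execute_file_upload_test would raise NameError; PATCH 2/5 below moves the timer. A minimal sketch of the corrected pattern — the names here are illustrative stand-ins, not part of the diff:

    import asyncio
    from time import perf_counter

    async def do_uploads() -> None:
        # Stand-in for upload_multi_files_under_folder(); replace with real work.
        await asyncio.sleep(0.1)

    def timed_run() -> float:
        start = perf_counter()      # start the clock in the caller's scope...
        asyncio.run(do_uploads())   # ...so it covers the whole event-loop run
        elapsed = perf_counter() - start
        print(f"\nTime to upload multiple files: {elapsed}")
        return elapsed

Keeping the timer in the synchronous caller also means it measures event-loop startup and teardown, which is what the benchmark actually pays per test.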
From 7bb66760d13d962f2faf9941c229739c33400a63 Mon Sep 17 00:00:00 2001
From: EC2 Default User
Date: Wed, 22 Oct 2025 20:38:29 +0000
Subject: [PATCH 2/5] change position of the timer

---
 docs/scripts/uploadBenchmark.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/scripts/uploadBenchmark.py b/docs/scripts/uploadBenchmark.py
index 7d0ffca16..1ec474c5a 100644
--- a/docs/scripts/uploadBenchmark.py
+++ b/docs/scripts/uploadBenchmark.py
@@ -344,7 +344,6 @@ def execute_sync_to_s3(
     print(f"\nTime to S3 sync: {perf_counter() - time_before_sync}")

 async def upload_multi_files_under_folder(path: str):
-    time_before_uploading_files = perf_counter()
     # Create a project
     root_project = Project(id=PARENT_PROJECT)
     files = []
@@ -371,6 +370,7 @@ def execute_file_upload_test(path: str, test_name: str) -> None:
     :param test_name: The name of the test to add to the span name
     """
     with tracer.start_as_current_span(f"file_upload__{test_name}"):
+        time_before_uploading_files = perf_counter()
         asyncio.run(upload_multi_files_under_folder(path))

     print(
From 61e41f3bb48ff12c6ef9671fe716387f1bcef7e1 Mon Sep 17 00:00:00 2001
From: EC2 Default User
Date: Thu, 23 Oct 2025 20:34:43 +0000
Subject: [PATCH 3/5] update script for large-file upload tests

---
 docs/scripts/uploadBenchmark.py | 72 +++++++++++++++++++++------------
 1 file changed, 46 insertions(+), 26 deletions(-)

diff --git a/docs/scripts/uploadBenchmark.py b/docs/scripts/uploadBenchmark.py
index 1ec474c5a..8e0b1a427 100644
--- a/docs/scripts/uploadBenchmark.py
+++ b/docs/scripts/uploadBenchmark.py
@@ -38,18 +38,21 @@
 # from opentelemetry.sdk.resources import SERVICE_NAME, Resource
 # from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

-# trace.set_tracer_provider(
-#     TracerProvider(resource=Resource(attributes={SERVICE_NAME: "upload_benchmarking"}))
-# )
-# trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
+# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://ingest.us.signoz.cloud"
+# os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = "signoz-ingestion-key="
+# os.environ["OTEL_SERVICE_INSTANCE_ID"] = "local"
+
+trace.set_tracer_provider(
+    TracerProvider(resource=Resource(attributes={SERVICE_NAME: "upload_benchmarking"}))
+)
+trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
 tracer = trace.get_tracer("my_tracer")

-PARENT_PROJECT = "syn70749126"
+PARENT_PROJECT = "syn18342812"
 S3_BUCKET = "s3://$FILL_ME_IN"
 S3_PROFILE = "$FILL_ME_IN"

 MiB: int = 2**20
-GiB: int = 2**30


 def create_folder_structure(
@@ -105,11 +108,21 @@ def create_folder_structure(

     print(f"total_size_of_files_bytes: {total_size_of_files_bytes}")
     print(f"size_of_each_file_bytes: {size_of_each_file_bytes}")

+    if total_size_of_files_mib >= 1024 * 5:
+        print(
+            f"total_size_of_files_gib: {total_size_of_files_bytes / (1024 * MiB)}"
+        )
+        # 32 MiB chunks
+        chunk_size = 32 * MiB
+    else:
+        # 1 MiB chunks
+        chunk_size = MiB  # size of each chunk in bytes
+
     def create_files_in_current_dir(path_to_create_files: str) -> None:
         for i in range(1, num_files_per_directory + 1):
-            chunk_size = MiB  # size of each chunk in bytes
             num_chunks = size_of_each_file_bytes // chunk_size
             filename = os.path.join(path_to_create_files, f"file{i}.txt")
+            # when the file size is right, just modify the beginning to refresh the file
             if (
                 os.path.isfile(filename)
                 and os.path.getsize(filename) == size_of_each_file_bytes
@@ -117,6 +130,7 @@ def create_folder_structure(
             ):
                 with open(filename, "r+b") as f:
                     f.seek(0)
                     f.write(os.urandom(chunk_size))
+            # if the file doesn't exist or the size is wrong, create it from scratch
             else:
                 if os.path.isfile(filename):
                     os.remove(filename)
@@ -213,6 +227,7 @@ def execute_synapseutils_test(
             manifestFile=manifest_path,
             sendMessages=False,
         )
+
         print(
             f"\nTime to sync to Synapse: {perf_counter() - time_before_syncToSynapse}"
         )
@@ -364,7 +379,7 @@ async def upload_multi_files_under_folder(path: str):
     return stored_project

 def execute_file_upload_test(path: str, test_name: str) -> None:
-    """Executes the file upload test.
+    """Executes the file upload stress test.

     :param path: The path to the root directory
     :param test_name: The name of the test to add to the span name
@@ -403,8 +418,13 @@ def execute_test_suite(
         num_files_per_directory=num_files_per_directory,
         total_size_of_files_mib=total_size_of_files_mib,
     )
-    test_name = f"{total_files}_files_{total_size_of_files_mib}MiB"
-    execute_file_upload_test(path, test_name)
+
+    if total_size_of_files_mib >= 1024:
+        test_name = f"{total_files}_files_{total_size_of_files_mib // 1024}GiB"
+    else:
+        test_name = f"{total_files}_files_{total_size_of_files_mib}MiB"
+
+    # execute_file_upload_test(path, test_name)

     # execute_synapseutils_test(path, test_name)

@@ -436,23 +456,8 @@ def execute_test_suite(

     # execute_sync_to_s3(path, test_name)

 syn = synapseclient.Synapse(debug=False, http_timeout_seconds=700)
 root_path = os.path.expanduser("~/benchmarking")
 # Log-in with ~.synapseConfig `authToken`
-syn.login()
-
-print("1 File - 10GiB")
-# 1 File - 10GiB -----------------------------------------------------------------------
-depth = 1
-sub_directories = 1
-files_per_directory = 1
-size_mib = 10 * 1024
-
-execute_test_suite(
-    path=root_path,
-    depth_of_directory_tree=depth,
-    num_sub_directories=sub_directories,
-    num_files_per_directory=files_per_directory,
-    total_size_of_files_mib=size_mib,
-)
+syn.login(profile="dev")

 # print("25 Files - 25MiB")
 # 25 Files - 25MiB -----------------------------------------------------------------------
@@ -585,3 +590,18 @@ def execute_test_suite(
 #     num_files_per_directory=files_per_directory,
 #     total_size_of_files_mib=size_mib,
 # )
+
+print("45 Files - 100GiB each")
+# 45 Files - 100GiB each -----------------------------------------------------------------------
+depth = 1
+sub_directories = 1
+files_per_directory = 45
+size_mib = 100 * 1024 * 45
+
+execute_test_suite(
+    path=root_path,
+    depth_of_directory_tree=depth,
+    num_sub_directories=sub_directories,
+    num_files_per_directory=files_per_directory,
+    total_size_of_files_mib=size_mib,
+)
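Reviewer note on PATCH 3/5: the fixture generator above writes files in chunks (32 MiB chunks once the total reaches 5 GiB) and, when a file of the right size already exists, only refreshes its first chunk, which keeps multi-GiB fixtures cheap to regenerate between runs. A standalone sketch of the same chunked-write technique, with illustrative file name and sizes:

    import os

    MiB = 2**20

    def make_random_file(filename: str, size_bytes: int, chunk_size: int = 32 * MiB) -> None:
        # Write size_bytes of random data without holding it all in memory.
        with open(filename, "wb") as f:
            remaining = size_bytes
            while remaining > 0:
                chunk = os.urandom(min(chunk_size, remaining))
                f.write(chunk)
                remaining -= len(chunk)

    # e.g. one 25 MiB fixture, as in the commented-out 25-file test
    make_random_file("file1.txt", 25 * MiB)

Generating from os.urandom rather than zeros matters for upload benchmarks: incompressible data defeats any transparent compression or deduplication along the transfer path.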
From 5565daa8860b14fc7972d5370460544940de84a8 Mon Sep 17 00:00:00 2001
From: EC2 Default User
Date: Tue, 28 Oct 2025 15:48:41 +0000
Subject: [PATCH 4/5] update script to log benchmark timings

---
 docs/scripts/uploadBenchmark.py | 46 +++++++++++++++++++++++++--------
 1 file changed, 35 insertions(+), 11 deletions(-)

diff --git a/docs/scripts/uploadBenchmark.py b/docs/scripts/uploadBenchmark.py
index 8e0b1a427..020dd95a0 100644
--- a/docs/scripts/uploadBenchmark.py
+++ b/docs/scripts/uploadBenchmark.py
@@ -173,7 +173,7 @@ def cleanup(
         ["aws", "s3", "rm", S3_BUCKET, "--recursive", "--profile", S3_PROFILE]
     )  # nosec
     if delete_synapse:
-        for child in syn.getChildren(PARENT_PROJECT, includeTypes=["folder"]):
+        for child in syn.getChildren(PARENT_PROJECT, includeTypes=["folder", "file"]):
             syn.delete(child["id"])
         syn.cache.purge(after_date=datetime.datetime(2021, 1, 1))

@@ -358,39 +358,63 @@ def execute_sync_to_s3(
     )  # nosec
     print(f"\nTime to S3 sync: {perf_counter() - time_before_sync}")

-async def upload_multi_files_under_folder(path: str):
+async def upload_multi_files_under_folder(path: str, total_files: int = 1) -> Project:
     # Create a project
     root_project = Project(id=PARENT_PROJECT)
     files = []
+    i = 0
     for directory_path, directory_names, file_names in os.walk(path):
         # Replicate the files on Synapse
         for filename in file_names:
+            if i >= total_files:
+                break
+            time_before_uploading_files = perf_counter()
             filepath = os.path.join(directory_path, filename)
             file = File(
                 path=filepath,
             )
             files.append(file)
+            print(f"Time to prepare file {filename} for upload: {perf_counter() - time_before_uploading_files}")
+            i += 1

     # Set the files attribute directly
     root_project.files = files

     # Store the project with all files
+    print("About to store the files...")
     stored_project = await root_project.store_async()
     return stored_project

-def execute_file_upload_test(path: str, test_name: str) -> None:
+def execute_file_upload_test(path: str, test_name: str, total_files: int) -> None:
     """Executes the file upload stress test.

     :param path: The path to the root directory
     :param test_name: The name of the test to add to the span name
+    :param total_files: The number of files to upload
     """
     with tracer.start_as_current_span(f"file_upload__{test_name}"):
+        start_time = datetime.datetime.now()
         time_before_uploading_files = perf_counter()
-        asyncio.run(upload_multi_files_under_folder(path))

-    print(
-        f"\nTime to upload multiple files: {perf_counter() - time_before_uploading_files}"
-    )
+        # Write start time to log file
+        log_file_path = os.path.expanduser("~/upload_benchmark_times.log")
+        with open(log_file_path, "a") as log_file:
+            log_file.write(f"Test: {test_name}\n")
+            log_file.write(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+
+        print(f"File upload test started at: {start_time.strftime('%Y-%m-%d %H:%M:%S')}")
+        asyncio.run(upload_multi_files_under_folder(path, total_files=total_files))
+
+        end_time = datetime.datetime.now()
+        duration = perf_counter() - time_before_uploading_files
+        print(f"Time to upload multiple files: {duration}")
+
+
+        # Write end time and duration to log file
+        with open(log_file_path, "a") as log_file:
+            log_file.write(f"End time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+            log_file.write(f"Duration: {duration:.2f} seconds\n")
+            log_file.write("-" * 50 + "\n")

 def execute_test_suite(
     path: str,
@@ -433,7 +457,7 @@ def execute_test_suite(
     # Cleanup can be changed to delete_local=True when we want to clear the files out
     # This can be kept as False to allow multiple tests with the same file/folder
     # structure to re-use the files on Disk.
-    # cleanup(path=path, delete_synapse=True, delete_s3=True, delete_local=False)
+    cleanup(path=path, delete_synapse=True, delete_s3=False, delete_local=False)
     _, total_files, _ = create_folder_structure(
         path=path,
         depth_of_directory_tree=depth_of_directory_tree,
@@ -447,7 +471,7 @@ def execute_test_suite(
     else:
         test_name = f"{total_files}_files_{total_size_of_files_mib}MiB"

-    # execute_file_upload_test(path, test_name)
+    execute_file_upload_test(path, test_name, total_files)

     # execute_synapseutils_test(path, test_name)
@@ -458,7 +482,7 @@ def execute_test_suite(

     # execute_sync_to_s3(path, test_name)

-syn = synapseclient.Synapse(debug=False, http_timeout_seconds=700)
+syn = synapseclient.Synapse(debug=True, http_timeout_seconds=60000000)
 root_path = os.path.expanduser("~/benchmarking")
 # Log-in with ~.synapseConfig `authToken`
 syn.login(profile="dev")
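Reviewer note on PATCH 4/5: this patch (and PATCH 5/5 below, twice) repeats the same start/end/duration bookkeeping around each test body. A reviewer might suggest factoring it into a context manager; a hypothetical sketch — log_benchmark is not part of the series, and the default log path mirrors the one used above:

    import datetime
    import os
    from contextlib import contextmanager
    from time import perf_counter

    @contextmanager
    def log_benchmark(test_name: str, log_file: str = "~/upload_benchmark_times.log"):
        """Append start time, end time, and duration for one benchmark run."""
        log_path = os.path.expanduser(log_file)
        start_wall = datetime.datetime.now()
        start = perf_counter()
        with open(log_path, "a") as f:
            f.write(f"Test: {test_name}\n")
            f.write(f"Start time: {start_wall.strftime('%Y-%m-%d %H:%M:%S')}\n")
        try:
            yield
        finally:
            # Runs even if the benchmark body raises, so partial runs are logged too.
            duration = perf_counter() - start
            end_wall = datetime.datetime.now()
            with open(log_path, "a") as f:
                f.write(f"End time: {end_wall.strftime('%Y-%m-%d %H:%M:%S')}\n")
                f.write(f"Duration: {duration:.2f} seconds\n")
                f.write("-" * 50 + "\n")

Each test body would then shrink to: with log_benchmark(test_name): asyncio.run(...).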
From f6130644dc3043cc0677b8cce282adfef4317a0c Mon Sep 17 00:00:00 2001
From: EC2 Default User
Date: Fri, 7 Nov 2025 15:55:27 +0000
Subject: [PATCH 5/5] update function to upload files sequentially

---
 docs/scripts/uploadBenchmark.py | 229 ++++++++++++++++++++------------
 1 file changed, 143 insertions(+), 86 deletions(-)

diff --git a/docs/scripts/uploadBenchmark.py b/docs/scripts/uploadBenchmark.py
index 020dd95a0..d90a8411c 100644
--- a/docs/scripts/uploadBenchmark.py
+++ b/docs/scripts/uploadBenchmark.py
@@ -19,6 +19,7 @@
 import asyncio
 import datetime
+import logging
 import os
 import shutil
 import subprocess  # nosec
@@ -48,7 +49,7 @@
 trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
 tracer = trace.get_tracer("my_tracer")

-PARENT_PROJECT = "syn18342812"
+PARENT_PROJECT = "syn70984427"
 S3_BUCKET = "s3://$FILL_ME_IN"
 S3_PROFILE = "$FILL_ME_IN"
@@ -108,15 +109,7 @@ def create_folder_structure(
     print(f"total_size_of_files_bytes: {total_size_of_files_bytes}")
     print(f"size_of_each_file_bytes: {size_of_each_file_bytes}")

-    if total_size_of_files_mib >= 1024 * 5:
-        print(
-            f"total_size_of_files_gib: {total_size_of_files_bytes / (1024 * MiB)}"
-        )
-        # 32 MiB chunks
-        chunk_size = 32 * MiB
-    else:
-        # 1 MiB chunks
-        chunk_size = MiB  # size of each chunk in bytes
+    chunk_size = MiB  # size of each chunk in bytes

     def create_files_in_current_dir(path_to_create_files: str) -> None:
         for i in range(1, num_files_per_directory + 1):
@@ -227,7 +220,7 @@ def execute_synapseutils_test(
             manifestFile=manifest_path,
             sendMessages=False,
         )
-        
+
         print(
             f"\nTime to sync to Synapse: {perf_counter() - time_before_syncToSynapse}"
         )
@@ -293,6 +286,75 @@ def execute_walk_test(
     )


+def execute_walk_file_sequential(
+    path: str,
+    test_name: str,
+) -> None:
+    """Execute the test that uses os.walk to sync all files/folders to Synapse.
+
+    Arguments:
+        path: The path to the root directory
+        test_name: The name of the test to add to the span name
+    """
+    with tracer.start_as_current_span(f"manual_walk__{test_name}"):
+        time_before_walking_tree = perf_counter()
+
+        # Create descriptive log file name with timestamp
+        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
+        log_file_path = os.path.expanduser(
+            f"~/upload_benchmark_{test_name}_{timestamp}.log"
+        )
+        with open(log_file_path, "a") as log_file:
+            log_file.write(f"Test: {test_name}\n")
+            start_time = datetime.datetime.now()
+            log_file.write(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+
+        # Create a simple parent lookup
+        parents = {path: PARENT_PROJECT}
+
+        for directory_path, directory_names, file_names in os.walk(path):
+            # Create folders on Synapse first
+            for directory_name in directory_names:
+                folder_path = os.path.join(directory_path, directory_name)
+                parent_id = parents[directory_path]
+
+                new_folder = Folder(name=directory_name, parent_id=parent_id)
+                # Store each folder immediately and save its Synapse ID
+                stored_folder = asyncio.run(new_folder.store_async())
+                parents[folder_path] = stored_folder.id
+
+            # Upload files one by one
+            for filename in file_names:
+                filepath = os.path.join(directory_path, filename)
+                parent_id = parents[directory_path]
+
+                new_file = File(
+                    path=filepath,
+                    parent_id=parent_id,
+                    annotations={
+                        "annot1": "value1",
+                        "annot2": 1,
+                        "annot3": 1.2,
+                        "annot4": True,
+                        "annot5": "2020-01-01",
+                    },
+                    description="This is a Test File",
+                )
+                # Upload this single file immediately
+                asyncio.run(new_file.store_async())
+
+        # Write end time and duration to log file
+        with open(log_file_path, "a") as log_file:
+            end_time = datetime.datetime.now()
+            duration = perf_counter() - time_before_walking_tree
+            log_file.write(f"End time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+            log_file.write(f"Duration: {duration:.2f} seconds\n")
+            log_file.write("-" * 50 + "\n")
+        print(
+            f"\nTime to walk and sync tree sequentially: {perf_counter() - time_before_walking_tree}"
+        )
+
+
 def execute_walk_test_oop(
     path: str,
     test_name: str,
@@ -306,6 +368,16 @@ def execute_walk_test_oop(
     with tracer.start_as_current_span(f"manual_walk__{test_name}"):
         time_before_walking_tree = perf_counter()

+        # Create descriptive log file name with timestamp
+        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
+        log_file_path = os.path.expanduser(
+            f"~/upload_benchmark_{test_name}_{timestamp}.log"
+        )
+        with open(log_file_path, "a") as log_file:
+            log_file.write(f"Test: {test_name}\n")
+            start_time = datetime.datetime.now()
+            log_file.write(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+
         root_project = Project(id=PARENT_PROJECT)
         parents = {path: root_project}
         for directory_path, directory_names, file_names in os.walk(path):
@@ -334,6 +406,14 @@ def execute_walk_test_oop(
                 )
                 parent_container.files.append(new_file)
         asyncio.run(root_project.store_async())
+
+        # Write end time and duration to log file
+        with open(log_file_path, "a") as log_file:
+            end_time = datetime.datetime.now()
+            duration = perf_counter() - time_before_walking_tree
+            log_file.write(f"End time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+            log_file.write(f"Duration: {duration:.2f} seconds\n")
+            log_file.write("-" * 50 + "\n")
     print(
         f"\nTime to walk and sync tree - OOP: {perf_counter() - time_before_walking_tree}"
     )
@@ -358,63 +438,7 @@ def execute_sync_to_s3(
     )  # nosec
     print(f"\nTime to S3 sync: {perf_counter() - time_before_sync}")

-async def upload_multi_files_under_folder(path: str, total_files: int = 1) -> Project:
-    # Create a project
-    root_project = Project(id=PARENT_PROJECT)
-    files = []
-    i = 0
-    for directory_path, directory_names, file_names in os.walk(path):
-        # Replicate the files on Synapse
-        for filename in file_names:
-            if i >= total_files:
-                break
-            time_before_uploading_files = perf_counter()
-            filepath = os.path.join(directory_path, filename)
-            file = File(
-                path=filepath,
-            )
-            files.append(file)
-            print(f"Time to prepare file {filename} for upload: {perf_counter() - time_before_uploading_files}")
-            i += 1
-
-    # Set the files attribute directly
-    root_project.files = files
-
-    # Store the project with all files
-    print("About to store the files...")
-    stored_project = await root_project.store_async()
-    return stored_project
-
-def execute_file_upload_test(path: str, test_name: str, total_files: int) -> None:
-    """Executes the file upload stress test.
-
-    :param path: The path to the root directory
-    :param test_name: The name of the test to add to the span name
-    :param total_files: The number of files to upload
-    """
-    with tracer.start_as_current_span(f"file_upload__{test_name}"):
-        start_time = datetime.datetime.now()
-        time_before_uploading_files = perf_counter()
-
-        # Write start time to log file
-        log_file_path = os.path.expanduser("~/upload_benchmark_times.log")
-        with open(log_file_path, "a") as log_file:
-            log_file.write(f"Test: {test_name}\n")
-            log_file.write(f"Start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
-
-        print(f"File upload test started at: {start_time.strftime('%Y-%m-%d %H:%M:%S')}")
-        asyncio.run(upload_multi_files_under_folder(path, total_files=total_files))
-
-        end_time = datetime.datetime.now()
-        duration = perf_counter() - time_before_uploading_files
-        print(f"Time to upload multiple files: {duration}")
-
-
-        # Write end time and duration to log file
-        with open(log_file_path, "a") as log_file:
-            log_file.write(f"End time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\n")
-            log_file.write(f"Duration: {duration:.2f} seconds\n")
-            log_file.write("-" * 50 + "\n")

 def execute_test_suite(
     path: str,
@@ -471,7 +494,7 @@ def execute_test_suite(
     else:
         test_name = f"{total_files}_files_{total_size_of_files_mib}MiB"

-    execute_file_upload_test(path, test_name, total_files)
+    execute_walk_file_sequential(path, test_name)

     # execute_synapseutils_test(path, test_name)
@@ -482,10 +505,11 @@ def execute_test_suite(

     # execute_sync_to_s3(path, test_name)

-syn = synapseclient.Synapse(debug=True, http_timeout_seconds=60000000)
-root_path = os.path.expanduser("~/benchmarking")
+syn = synapseclient.Synapse(debug=True, http_timeout_seconds=600)
+synapseclient.Synapse.enable_open_telemetry()
+root_path = os.path.expanduser("~/benchmarking3")
+
 # Log-in with ~.synapseConfig `authToken`
-syn.login(profile="dev")
+syn.login()

 # print("25 Files - 25MiB")
 # 25 Files - 25MiB -----------------------------------------------------------------------
@@ -614,17 +640,49 @@ def execute_test_suite(
 #     num_files_per_directory=files_per_directory,
 #     total_size_of_files_mib=size_mib,
 # )

-print("45 Files - 100GiB each")
-# 45 Files - 100GiB each -----------------------------------------------------------------------
-depth = 1
-sub_directories = 1
-files_per_directory = 45
-size_mib = 100 * 1024 * 45
-
-execute_test_suite(
-    path=root_path,
-    depth_of_directory_tree=depth,
-    num_sub_directories=sub_directories,
-    num_files_per_directory=files_per_directory,
-    total_size_of_files_mib=size_mib,
-)
+# print("4 Files - 400GiB")
+# # 4 Files - 400GiB -----------------------------------------------------------------------
+# depth = 1
+# sub_directories = 1
+# files_per_directory = 4
+# size_mib = 4 * 100 * 1024

+# execute_test_suite(
+#     path=root_path,
+#     depth_of_directory_tree=depth,
+#     num_sub_directories=sub_directories,
+#     num_files_per_directory=files_per_directory,
+#     total_size_of_files_mib=size_mib,
+# )
+
+
+# print("45 Files - 100GiB each")
+# # 45 Files - 100GiB each -----------------------------------------------------------------------
+# depth = 1
+# sub_directories = 1
+# files_per_directory = 45
+# size_mib = 45 * 100 * 1024

+# execute_test_suite(
+#     path=root_path,
+#     depth_of_directory_tree=depth,
+#     num_sub_directories=sub_directories,
+#     num_files_per_directory=files_per_directory,
+#     total_size_of_files_mib=size_mib,
+# )
+
+
+# print("4 Files - 1GiB each")
+# # 4 Files - 1GiB each -----------------------------------------------------------------------
+# depth = 1
+# sub_directories = 1
+# files_per_directory = 4
+# size_mib = 4 * 1024

+# execute_test_suite(
+#     path=root_path,
+#     depth_of_directory_tree=depth,
+#     num_sub_directories=sub_directories,
+#     num_files_per_directory=files_per_directory,
+#     total_size_of_files_mib=size_mib,
+# )
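Reviewer summary: taken together, the series abandons the batched Project.files upload from PATCH 1 and lands on per-file sequential uploads driven by os.walk. For readers who want the end state without replaying the diffs, here is a minimal, self-contained sketch of that final pattern; the project ID and root path are placeholders, and it assumes the synapseclient.models OOP API (Project/Folder/File with store_async) used throughout the patches:

    import asyncio
    import os
    from time import perf_counter

    import synapseclient
    from synapseclient.models import File, Folder

    PARENT_PROJECT = "syn$FILL_ME_IN"  # placeholder, as elsewhere in the script

    def walk_and_upload(root: str) -> None:
        """Mirror the tree under root into PARENT_PROJECT, one entity at a time."""
        parents = {root: PARENT_PROJECT}  # local directory -> Synapse container ID
        overall = perf_counter()
        for directory_path, directory_names, file_names in os.walk(root):
            parent_id = parents[directory_path]
            # Folders first, so files always have a stored parent to attach to
            for directory_name in directory_names:
                folder = asyncio.run(
                    Folder(name=directory_name, parent_id=parent_id).store_async()
                )
                parents[os.path.join(directory_path, directory_name)] = folder.id
            # Then upload each file immediately, timing it in isolation
            for filename in file_names:
                start = perf_counter()
                asyncio.run(
                    File(path=os.path.join(directory_path, filename),
                         parent_id=parent_id).store_async()
                )
                print(f"{filename}: {perf_counter() - start:.2f}s")
        print(f"Total: {perf_counter() - overall:.2f}s")

    syn = synapseclient.Synapse()
    syn.login()  # reads authToken from ~/.synapseConfig
    walk_and_upload(os.path.expanduser("~/benchmarking"))

Relative to the batched approach removed in PATCH 5/5, one store_async() call per entity trades aggregate throughput for per-file timings and bounded memory, which matches the series' goal of isolating where large uploads stall.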