decouple read size from write size in ddb model
william-schor committed Feb 15, 2024
1 parent 6dcf066 commit e7f1d06
Showing 2 changed files with 11 additions and 4 deletions.
13 changes: 9 additions & 4 deletions service_capacity_modeling/models/org/netflix/ddb.py
@@ -200,11 +200,16 @@ def _get_write_consistency_percentages(
 }
 
 
-def _mean_item_size_bytes(desires: CapacityDesires) -> float:
+def _mean_write_item_size_bytes(desires: CapacityDesires) -> float:
     mean_item_size = desires.query_pattern.estimated_mean_write_size_bytes.mid
     return mean_item_size
 
 
+def _mean_read_item_size_bytes(desires: CapacityDesires) -> float:
+    mean_item_size = desires.query_pattern.estimated_mean_read_size_bytes.mid
+    return mean_item_size
+
+
 def _get_dynamo_standard(context: RegionContext) -> Service:
     number_of_regions = context.num_regions
     dynamo_service = (
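Note (illustrative, not part of the diff): the hunk above splits the old shared _mean_item_size_bytes helper, which always returned the write size, into separate read and write helpers. A minimal sketch of the effect, assuming the CapacityDesires / QueryPattern / certain_int names import from service_capacity_modeling.interface as in the test file further down:

    # Illustrative only; import path and field defaults assumed.
    from service_capacity_modeling.interface import (
        CapacityDesires,
        QueryPattern,
        certain_int,
    )

    desires = CapacityDesires(
        query_pattern=QueryPattern(
            estimated_mean_write_size_bytes=certain_int(5798),  # ~5.7 KiB writes
            estimated_mean_read_size_bytes=certain_int(1024),   # 1 KiB reads
        )
    )

    # Before this change, both read and write planning used the write size
    # (5798 here); after it, each path reads its own field:
    #   _mean_write_item_size_bytes(desires) -> 5798.0
    #   _mean_read_item_size_bytes(desires)  -> 1024.0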
@@ -238,7 +243,7 @@ def _plan_writes(
     desires: CapacityDesires,
     extra_model_arguments: Dict[str, Any],
 ) -> _WritePlan:
-    mean_item_size = _mean_item_size_bytes(desires)
+    mean_item_size = _mean_write_item_size_bytes(desires)
 
     # For items up to 1 KB in size,
     # one WCU can perform one standard write request per second
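Aside (illustrative, not from the diff): the comment above restates DynamoDB's write-capacity rule, one WCU per standard write of an item up to 1 KB, so larger items cost one WCU per started KB. A standalone sketch of that rounding, with hypothetical names:

    import math

    def wcus_per_item(mean_item_size_bytes: float) -> int:
        # One WCU covers a standard write of up to 1 KB; bigger items need
        # one WCU per additional (started) KB.
        return math.ceil(max(1.0, mean_item_size_bytes / 1024))

    # With the 5798-byte mean write size used in the tests below:
    # ceil(5798 / 1024) = 6 WCUs per standard write (doubled for transactional writes).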
@@ -305,7 +310,7 @@ def _plan_reads(
     transactional_read_percent = read_percentages["transactional_read_percent"]
     eventual_read_percent = read_percentages["eventual_read_percent"]
     strong_read_percent = read_percentages["strong_read_percent"]
-    mean_item_size = _mean_item_size_bytes(desires)
+    mean_item_size = _mean_read_item_size_bytes(desires)
 
     # items up to 4 KB in size
     rounded_rcus_per_item = math.ceil(max(1.0, mean_item_size / (4 * 1024)))
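Aside (illustrative, not from the diff): the rounding on the last line above follows DynamoDB's read-capacity unit size of 4 KB per RCU, now fed by the read size instead of the write size. Worked through standalone:

    import math

    mean_item_size = 5798  # bytes, matching the test fixture below
    rounded_rcus_per_item = math.ceil(max(1.0, mean_item_size / (4 * 1024)))
    # 5798 / 4096 ≈ 1.42 -> 2 RCUs for a strongly consistent read of one such item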
@@ -377,7 +382,7 @@ def _plan_data_transfer(
         return _DataTransferPlan(
             total_data_transfer_gib=0, total_annual_data_transfer_cost=0
         )
-    mean_item_size_bytes = _mean_item_size_bytes(desires)
+    mean_item_size_bytes = _mean_write_item_size_bytes(desires)
     writes_per_second = desires.query_pattern.estimated_write_per_second.mid
     # 31,536,000 seconds in a year (365 * 24 * 60 * 60)
     # 1024 * 1024 * 1024 = 1Gib
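Aside (illustrative, not from the diff): data transfer stays sized from the write item size, since it is written items that get replicated between regions. Back-of-the-envelope using the constants in the two comments above, with a made-up write rate:

    writes_per_second = 1_000                      # hypothetical write rate
    mean_item_size_bytes = 5798
    seconds_per_year = 365 * 24 * 60 * 60          # 31,536,000
    bytes_per_gib = 1024 * 1024 * 1024

    annual_transfer_gib = (
        writes_per_second * mean_item_size_bytes * seconds_per_year / bytes_per_gib
    )
    # ≈ 170,000 GiB of written data per year to replicate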
2 changes: 2 additions & 0 deletions tests/netflix/test_ddb.py
@@ -53,6 +53,7 @@
         ),
         estimated_write_per_second=certain_int(0),
         estimated_mean_write_size_bytes=certain_int(5798),
+        estimated_mean_read_size_bytes=certain_int(5798),
     ),
     data_shape=DataShape(
         estimated_state_size_gib=Interval(low=10, mid=100, high=1000, confidence=0.98),
@@ -78,6 +79,7 @@
             low=100, mid=1000, high=10000, confidence=0.98
         ),
         estimated_mean_write_size_bytes=certain_int(5798),
+        estimated_mean_read_size_bytes=certain_int(5798),
     ),
     data_shape=DataShape(
         estimated_state_size_gib=Interval(low=10, mid=100, high=1000, confidence=0.98),
