Skip to content

Commit

Permalink
Tune flux populate_metric_worker
Browse files Browse the repository at this point in the history
IssueID #3864: flux - try except everything
IssueID #3068: SNAB

- Bring into line with flux worker performance changes, tune flux
  populate_metric_worker like flux upload_data in terms of pickle limits
- Added bandit nosec on random.random()

Modified:
docs/upload-data-to-flux.rst
skyline/flux/populate_metric_worker.py
skyline/snab/snab_flux_load_test.py
  • Loading branch information
earthgecko committed Feb 2, 2021
1 parent 1cfb29a commit 1251d53
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 4 deletions.
2 changes: 1 addition & 1 deletion docs/upload-data-to-flux.rst
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ follows. Note that in this instance you would need your
.. code-block:: python
FLUX_UPLOADS_KEYS = {
'temp_monitoring.warehouse.2.012383': '484166bf-df66-4f7d-ad4a-9336da9ef620',
'484166bf-df66-4f7d-ad4a-9336da9ef620': 'temp_monitoring.warehouse.2.012383',
}
curl request.
Expand Down
12 changes: 11 additions & 1 deletion skyline/flux/populate_metric_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,6 +489,7 @@ def pickle_data_to_graphite(data):
response = requests.get(fetch_url)
if response.status_code == 200:
success = True
logger.info('populate_metric_worker :: got responses from %s' % str(fetch_url))
except:
logger.info(traceback.format_exc())
logger.error('error :: populate_metric_worker :: http status code - %s, reason - %s' % (
Expand Down Expand Up @@ -817,7 +818,10 @@ def pickle_data_to_graphite(data):
try:
smallListOfMetricTuples.append(data)
tuples_added += 1
if tuples_added >= 1000:
# @modified 20210115 - Task #3864: flux - try except everything
# Bring into line with flux worker performance changes
# if tuples_added >= 1000:
if tuples_added >= 480:
pickle_data_sent = pickle_data_to_graphite(smallListOfMetricTuples)
if pickle_data_sent:
data_points_sent += tuples_added
Expand All @@ -827,6 +831,12 @@ def pickle_data_to_graphite(data):
sent_to_graphite += len(smallListOfMetricTuples)
smallListOfMetricTuples = []
tuples_added = 0
# @added 20210115 - Task #3864: flux - try except everything
# Bring into line with flux worker performance changes
# Reduce the speed of submissions to Graphite
# if there are lots of data points
if timeseries_length > 4000:
sleep(0.3)
else:
logger.error('error :: populate_metric_worker :: failed to send %s data points to Graphite via pickle for %s' % (
str(tuples_added), metric))
Expand Down
4 changes: 2 additions & 2 deletions skyline/snab/snab_flux_load_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ def spin_snab_flux_load_test_process(self, current_timestamp):
while len(snab_flux_load_test_metrics) < SNAB_FLUX_LOAD_TEST_METRICS:
new_uuid = str(uuid.uuid4())
new_metric_uuid = new_uuid.replace('-', '.')
slot = str(round(random.random(), 2))
slot = str(round(random.random(), 2)) # nosec
new_metric = '%s.%s.%s' % (SNAB_FLUX_LOAD_TEST_NAMESPACE_PREFIX, slot, new_metric_uuid)
snab_flux_load_test_metrics.append(new_metric)
# Add to the snab_flux_load_test_metrics_set Redis set
Expand Down Expand Up @@ -304,7 +304,7 @@ def spin_snab_flux_load_test_process(self, current_timestamp):
'metrics': []
}
if post_count < SNAB_FLUX_LOAD_TEST_METRICS_PER_POST:
post_data_dict['metrics'].append({'metric': metric, 'timestamp': str(epoch_timestamp), 'value': str(round(random.random(), 2))})
post_data_dict['metrics'].append({'metric': metric, 'timestamp': str(epoch_timestamp), 'value': str(round(random.random(), 2))}) # nosec
post_count += 1
if post_count == SNAB_FLUX_LOAD_TEST_METRICS_PER_POST:
response = None
Expand Down

0 comments on commit 1251d53

Please sign in to comment.