# State for the gradual segmented-caching rollout computed below.
declare local var.start_time TIME;   # when the rollout window opens
declare local var.end_time TIME;     # when the rollout window closes (100% of URLs enabled)
declare local var.time_ratio FLOAT;  # fraction of the rollout window already elapsed

# Any change to your configuration that changes the cache keys of
# objects will potentially cause your ENTIRE cached content to
# become unavailable, resulting in a large number of requests
# to your origin.

# One of these changes is enabling segmented caching (SC),
# which segments your responses and caches them in chunks,
# causing the cache keys to change.

# Enabling SC in your configuration means every request will
# result in a MISS until the cache has been populated again.

# By spanning this configuration change over a longer period
# of time, you can spread out the extra load on your backends.

# This can also apply to other features being introduced to your
# services, such as request normalization
# (for example, sorting the query string to increase cache hits)

# Set the enabling process starting time.
# Normally, you want to set a static start time like 1567987200,
# which is the timestamp of September 9, 2019 0:00:00 UTC:
#   set var.start_time = std.integer2time(1567987200);

# But here, for testing purposes, we set the enabling process time span
# to 1 hour (3600 seconds) and the starting time to 30 minutes
# (1800 seconds) ago, so that roughly 50% of URLs get SC enabled.
# Rollout window: opened 30 minutes ago, closes 30 minutes from now.
set var.start_time = time.sub(now, 1800s);
set var.end_time = time.add(var.start_time, 3600s);

# Fraction of the window elapsed: 0.0 at var.start_time, 1.0 at var.end_time.
set var.time_ratio = time.interval_elapsed_ratio(now, var.start_time, var.end_time);
log "Percentage of time passed: " var.time_ratio;


# Derive a stable per-URL ratio in [0, 1) by hashing the request URL:
#   hash seed: 0
#   hash range: 0 - 1023, then divided by 1024
# The same URL always yields the same ratio, so each URL switches to
# segmented caching at a fixed, deterministic point in the rollout window.
declare local var.hurl_ratio FLOAT;
set var.hurl_ratio = fastly.hash(req.url, 0, 0, 1023);
set var.hurl_ratio /= 1024;
log "This request URL Hash ratio is: " var.hurl_ratio;


# Enable segmented caching for this URL once the elapsed-time ratio has
# passed the URL's hash ratio; until then, leave it disabled.
if (var.time_ratio <= var.hurl_ratio) {
  log "Segmented caching is NOT enabled";
} else {
  set req.enable_segmented_caching = true;
  log "Segmented caching is enabled";
}