// Per-country throttling rules:
//  - sample_percent: percentage of eligible requests to throttle (0-100)
//  - start_hour / end_hour: hours of the day (UTC) when throttling applies
//  - pace: delivery budget; the tarpit below sends pace / 0.01 bytes every
//    100ms, which works out to roughly pace kilobytes per second
const throttlingConfig = {
  DE: { 
    sample_percent: 100,
    start_hour: 12,
    end_hour: 23,
    pace: 1875 
  },
  US: {
    sample_percent: 100,
    start_hour: 0,
    end_hour: 24, // This is all day for testing purposes
    pace: 2750 
  }
};


// Client IPs exempt from throttling (exact match only for now).
// TODO implement subnet matching
const throttlingExclusionAcl = [ "10.10.10.1" ];

addEventListener("fetch", event => {
  // Get the request plus the client's IP address and geolocation.
  const req = event.request;
  const address = event.client.address;
  const countryCode = event.client.geo.country_code;
  const throttlingCountry = throttlingConfig[countryCode];
 
  // Send the request to `origin_0`.
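  // `fetch` returns a promise; the request goes to the origin right away and
  // the throttling below only affects how fast the response body is streamed
  // back to the client.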
  let backendResponse = fetch(req, {
    backend: "origin_0"
  });

  if (!throttlingExclusionAcl.includes(address) &&
      throttlingCountry !== undefined) {
    // Throttle only the configured percentage of requests...
    if (throttlingCountry.sample_percent > getRandomInt(100)) {
      // ...and only within the configured time window (hours are UTC).
      let currentHour = (new Date()).getUTCHours();
      let startHour = throttlingCountry.start_hour;
      let endHour = throttlingCountry.end_hour;
      // Handle windows that wrap past midnight (e.g. 22:00-06:00).
      if (startHour > endHour) {
        endHour += 24;
        if (currentHour < startHour) {
          currentHour += 24;
        }
      }
      if (currentHour >= startHour && currentHour < endHour) {
        // Send one tenth of the per-second byte budget every 100ms.
        const pit = tarpit(100, throttlingCountry.pace / 0.01);
        console.log("Pace limit in effect: ip=" + address + " country=" + countryCode + " pace=" + throttlingCountry.pace);
        // `backendResponse` is a promise, so apply the tarpit once the
        // response headers have arrived from the origin.
        backendResponse = backendResponse.then(pit);
      }
    }
  }

  // Send the (possibly throttled) backend response back to the client.
  event.respondWith(backendResponse);
});

// Returns a random integer in the range [0, max), i.e. 0 to max - 1.
function getRandomInt(max) {
  return Math.floor(Math.random() * max);
}

// Resolves after `delay` milliseconds.
async function pause(delay = 100) {
  return new Promise(resolve => setTimeout(resolve, delay));
}

// See https://developer.fastly.com/solutions/examples/slowing-down-responses-tarpit/
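// Returns a function that wraps a Response so that its body is forwarded to
// the client one chunk at a time, pausing `millisecondDelay` ms between
// chunks and buffering roughly `bytesPerChunk` bytes ahead of the client.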
function tarpit(millisecondDelay = 100, bytesPerChunk = 1000) {
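  // The queuing strategy applies backpressure once about `bytesPerChunk`
  // bytes are queued; it paces the upstream source rather than enforcing an
  // exact chunk size, so the resulting throughput is approximate.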
  const writableStrategy = new ByteLengthQueuingStrategy({ highWaterMark: bytesPerChunk });

  const {readable, writable} = new TransformStream();
  const writer = writable.getWriter();

  async function processChunks(readStream) {
    const reader = readStream.getReader();
    while(true) {
      const {done, value: chunk} = await reader.read();
      if (done) {break;}
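      // Pause before forwarding each chunk to slow the overall transfer.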
      await pause(millisecondDelay);
      writer.write(chunk);
    }
    writer.close();
    reader.releaseLock();
  }

  return function(response) { 
    if (response.body) {
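      // Pump the origin body through the throttled pipeline and return a new
      // Response that streams from `readable`, keeping the original status
      // and headers.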
      processChunks(response.body.pipeThrough(new TransformStream(undefined, writableStrategy)));
      return new Response(readable, response);
    }
    return response;
  }
}