Integrate via Terraform

Installation with Terraform

Prerequisites

Protect your traffic

  1. Download the latest version of our Cloudflare Worker script.
  2. Create an empty terraform file, for example datadome_worker.tf and paste the following code:
# Terraform configuration for the DataDome Cloudflare Worker,
# targeting version 5 of the Cloudflare provider.
terraform {
  required_providers {
    cloudflare = {
      source  = "cloudflare/cloudflare"
      version = "~> 5"
    }
  }
}

# All credentials and settings are supplied via TF_VAR_* environment
# variables (see the export step below); no value is hard-coded here.
variable "cloudflare_api_token" {}
variable "cloudflare_zone_id" {}
variable "cloudflare_account_id" {}
variable "cloudflare_pattern" {}
variable "datadome_server_side_key" {}
variable "datadome_client_side_key" {}

provider "cloudflare" {
  api_token = var.cloudflare_api_token
}

# The Worker itself; the version and deployment resources below attach to it.
resource "cloudflare_worker" "datadome_worker" {
  account_id = var.cloudflare_account_id
  name       = "datadome_worker"
}

# Routes traffic matching cloudflare_pattern through the DataDome Worker.
resource "cloudflare_workers_route" "catch_all_route" {
  zone_id = var.cloudflare_zone_id
  pattern = var.cloudflare_pattern
  script  = cloudflare_worker.datadome_worker.name
  # Create the route only after a version has been deployed, so matching
  # traffic is never routed to a Worker that has no code yet.
  depends_on = [cloudflare_workers_deployment.datadome_worker_deployment]
}

# One version of the Worker: the bundled DataDome script (ES module)
# plus the DataDome keys exposed as secret bindings.
resource "cloudflare_worker_version" "datadome_worker_version" {
  account_id         = var.cloudflare_account_id
  worker_id          = cloudflare_worker.datadome_worker.id
  compatibility_date = "2025-04-14"
  main_module        = "datadome.js"

  modules = [{
    content_file = "dist/datadome.js"
    content_type = "application/javascript+module"
    name         = "datadome.js"
  }]

  # Secrets are bound by name; the Worker reads them from its environment.
  bindings = [
    {
      name = "DATADOME_SERVER_SIDE_KEY"
      text = var.datadome_server_side_key
      type = "secret_text"
    },
    {
      name = "DATADOME_CLIENT_SIDE_KEY"
      text = var.datadome_client_side_key
      type = "secret_text"
    }
  ]
}

# Activates the version above, sending 100% of the traffic to it.
resource "cloudflare_workers_deployment" "datadome_worker_deployment" {
  account_id  = var.cloudflare_account_id
  script_name = cloudflare_worker.datadome_worker.name
  strategy    = "percentage"
  versions = [{
    percentage = 100
    version_id = cloudflare_worker_version.datadome_worker_version.id
  }]
}
# Terraform configuration for the DataDome Cloudflare Worker,
# targeting version 4 of the Cloudflare provider.
terraform {
  required_providers {
    cloudflare = {
      source  = "cloudflare/cloudflare"
      version = "~> 4"
    }
  }
}

# All credentials and settings are supplied via TF_VAR_* environment
# variables (see the export step below); no value is hard-coded here.
variable "cloudflare_api_token" {}
variable "cloudflare_zone_id" {}
variable "cloudflare_account_id" {}
variable "cloudflare_pattern" {}
variable "datadome_server_side_key" {}
variable "datadome_client_side_key" {}

provider "cloudflare" {
  api_token = var.cloudflare_api_token
}

# Routes traffic matching cloudflare_pattern through the DataDome Worker.
resource "cloudflare_worker_route" "catch_all_route" {
  zone_id     = var.cloudflare_zone_id
  pattern     = var.cloudflare_pattern
  script_name = cloudflare_worker_script.datadome_worker.name
}

# The Worker script (ES module) with the DataDome keys bound as secrets.
resource "cloudflare_worker_script" "datadome_worker" {
  account_id = var.cloudflare_account_id
  name       = "datadome_worker"
  content    = file("${path.module}/dist/datadome.js")

  module = true

  secret_text_binding {
    name = "DATADOME_SERVER_SIDE_KEY"
    text = var.datadome_server_side_key
  }

  secret_text_binding {
    name = "DATADOME_CLIENT_SIDE_KEY"
    text = var.datadome_client_side_key
  }
}
  1. Export the mentioned variables (DataDome keys are available in your dashboard):
export TF_VAR_datadome_server_side_key=${DATADOME_SERVER_SIDE_KEY}
export TF_VAR_datadome_client_side_key=${DATADOME_CLIENT_SIDE_KEY}
export TF_VAR_cloudflare_api_token=${CLOUDFLARE_API_TOKEN}
export TF_VAR_cloudflare_zone_id=${CLOUDFLARE_ZONE_ID}
export TF_VAR_cloudflare_account_id=${CLOUDFLARE_ACCOUNT_ID}
export TF_VAR_cloudflare_pattern=${CLOUDFLARE_PATTERN}
  1. Run
terraform init
terraform plan
# Two resources will be created: the Worker script and the Worker route.
terraform apply

Congrats! You can now see your traffic in your DataDome dashboard.

Configuration

The configuration is done inside the script, using constants.

Server-side settings

| Setting name in Worker's code | Description | Required | Default value | Example |
| --- | --- | --- | --- | --- |
| DATADOME_SERVER_SIDE_KEY | Your DataDome server-side key, found in your dashboard. | Yes | - | - |
| DATADOME_TIMEOUT | Request timeout to DataDome API, in milliseconds. | No | 300 | 350 |
| DATADOME_URL_REGEX_EXCLUSION | Regular expression to exclude URLs from the DataDome analysis. | No | List of excluded static assets below | - |
| DATADOME_URL_REGEX_INCLUSION | Regular expression to only include URLs in the DataDome analysed traffic. | No | - | /login*/i |
| DATADOME_IP_EXCLUSION | List of IPs or CIDR from which traffic will be excluded from the DataDome analysis. | No | - | ["192.168.0.1", "192.168.0.2", "192.168.0.0/24"] |
| DATADOME_LOGPUSH_CONFIGURATION | List of Enriched headers names to log inside Logpush. | No | - | ["X-DataDome-botname", "X-DataDome-captchapassed", "X-DataDome-isbot"] |
| DATADOME_ENABLE_GRAPHQL_SUPPORT | Extract GraphQL operation name and type on request to a /graphql endpoint to improve protection. | No | false | true |
| DATADOME_ENABLE_REFERRER_RESTORATION | Set to true to restore original referrer when a challenge is passed. | No | false | true |
| DATADOME_ENABLE_DEBUGGING | Log in Workers logs detailed information about the DataDome process. | No | false | true |
| DATADOME_ENABLE_MCP_SUPPORT | Boolean to enable Model Context Protocol support. | No | false | true |
| DATADOME_CUSTOM_FIELD_STRING_1 | Static value or callback function to send a custom string. Refer to Add custom field section. | No | - | |
| DATADOME_CUSTOM_FIELD_STRING_2 | Static value or callback function to send a custom string. Refer to Add custom field section. | No | - | |
| DATADOME_CUSTOM_FIELD_STRING_3 | Static value or callback function to send a custom string. Refer to Add custom field section. | No | - | |
| DATADOME_CUSTOM_FIELD_INTEGER_1 | Static value or callback function to send a custom integer. Refer to Add custom field section. | No | - | |
| DATADOME_CUSTOM_FIELD_INTEGER_2 | Static value or callback function to send a custom integer. Refer to Add custom field section. | No | - | |
| DATADOME_CUSTOM_FIELD_FLOAT_1 | Static value or callback function to send a custom float. Refer to Add custom field section. | No | - | |
 /\.(avi|flv|mka|mkv|mov|mp4|mpeg|mpg|mp3|flac|ogg|ogm|opus|wav|webm|webp|bmp|gif|ico|jpeg|jpg|png|svg|svgz|swf|eot|otf|ttf|woff|woff2|css|less|js|map)$/i

Client-side settings

| Setting name in Worker's code | Description | Required | Default value | Example |
| --- | --- | --- | --- | --- |
| DATADOME_CLIENT_SIDE_KEY | Your DataDome client-side key, found in your dashboard. | Yes | - | - |
| DATADOME_JS_URL | URL of the DataDome JS tag that can be changed to include the tag as a first party. | No | https://js.datadome.co/tags.js | https://ddfoo.com/tags.js |
| DATADOME_JS_ENDPOINT | Endpoint of the DataDome JS Tag. | No | | |
| DATADOME_JS_TAG_OPTIONS | JSON object describing DataDome JS Tag options. | No | { "ajaxListenerPath": true } | { "ajaxListenerPath": "example.com", "allowHtmlContentTypeOnCaptcha": true } |
| DATADOME_JS_URL_REGEX_EXCLUSION | Regular expression to NOT set the DataDome JS Tag on matching URLs. | No | - | - |
| DATADOME_JS_URL_REGEX_INCLUSION | Regular expression to set the DataDome JS Tag on matching URLs. | No | - | /login*/i |

Update with Terraform

  1. Download the latest version of our Cloudflare Worker script.
  2. Paste the content of the datadome.js file into the file used for the content of the script of your Worker.
  3. Run
terraform plan

1 resource will be changed.

  1. Run
terraform apply

Uninstallation with Terraform

To delete the DataDome Worker and its script, run from the location of your datadome_worker.tf and terraform.tfstate:

terraform destroy -target cloudflare_worker_script.datadome_worker

Logging

DataDome custom logging

  1. Inside the Cloudflare Dashboard, go to the DataDome Worker's page.
  2. Click on Settings, go to the Observability section.
  3. Click on the pen icon next to Workers Logs.
  1. Enable logs.
  2. Click on Deploy.
  3. You will see the logs inside the Logs tab.
    By default, DataDome logs errors only (such as errors in the configuration). If you want to have detailed logs for debugging, you can set DATADOME_ENABLE_DEBUGGING to true.

DataDome logs format

The DataDome custom logs have the following format:

{
  "step": "string",
  "result": "string", 
  "reason": "string",
  "details": {
    "key": "value"
  },
  "company": "DataDome",
  "line": 123
}

Logpush

You can use Logpush to send logs to a destination supported by Logpush (Datadog, Splunk, S3 Bucket…).

❗️

Cloudflare plan

Logpush is available to customers on Cloudflare’s Enterprise plan.

Update the Worker’s script

  1. Fill the DATADOME_LOGPUSH_CONFIGURATION value with the name of the values you want, as an Array of Strings.
    The possible values are available in the Enriched headers page.

Eg:

DATADOME_LOGPUSH_CONFIGURATION = ["X-DataDome-botname", "X-DataDome-isbot", "x-datadomeresponse"]

Enable Logpush

  1. Inside the Cloudflare Dashboard, go to the DataDome Worker's page.
  2. Click on Settings, go to the Observability section.
  3. Click on Enable next to Logpush.

Advanced configuration

Add custom fields

Requires version 2.2.0 of the CloudflareWorker module.

📘

Custom fields feature

DataDome lets you enrich our detection engine in real time by sending us custom fields with your business data. These fields can be used for specific detection models.

👋 Please reach out to our support team for reviewing the data received.

Sample a callback function to set custom fields dynamically
The callback is a function receiving a single parameter, the Cloudflare HTTP request, defined here:

// Sample code for custom fields
// Editing directly the datadome.js file
var DATADOME_CUSTOM_FIELD_STRING_1 = function (request) {
    // Use the first "x-user-tier" header value when available,
    // otherwise fall back to 'standard'.
    var tierHeader = request.headers['x-user-tier'];
    var tierValue = tierHeader && tierHeader[0] && tierHeader[0].value;
    return tierValue ? tierValue : 'standard';
};
// Sample code for custom fields
// when calling activateDataDome
 const dataDomeHandler = activateDataDome(myHandler, {
   serverSideKey: env.DATADOME_SERVER_SIDE_KEY,
   clientSideKey: env.DATADOME_CLIENT_SIDE_KEY,
   customFieldString1: function (request) {
     // Use the first "x-user-tier" header value when available,
     // otherwise fall back to 'standard'.
     var tierHeader = request.headers['x-user-tier'];
     var tierValue = tierHeader && tierHeader[0] && tierHeader[0].value;
     return tierValue ? tierValue : 'standard';
   },
   // ...other options
 });

Restore the referrer

After passing a DataDome challenge on browsers other than Firefox, the referrer value is updated to the current URL which can lead to inconsistent results in website analytics.

It is possible to restore the Referer header to its original value for your backend:

  • Contact our support team, they will review your requirements and provide you with the best recommendations
  • Set the boolean value of the DATADOME_ENABLE_REFERRER_RESTORATION option to true

Enable GraphQL support

It is possible to enable GraphQL support and extract the operation type and operation name from the request body from GraphQL requests.

You need to set the boolean value of the DATADOME_ENABLE_GRAPHQL_SUPPORT option to true

Once enabled, POST requests targeting a graphql endpoint with content-type: application/json will be analyzed to extract GraphQL operation name and type.

Enable MCP support

Starting from version 2.3.0 of the module, it is possible to enable support for the Model Context Protocol and extract information from the request body.

You need to set the boolean value of DATADOME_ENABLE_MCP_SUPPORT option to true.

Once enabled, requests targeting an endpoint with a /mcp route will be analyzed to extract MCP properties according to the official specification:

  • mcp-session-id and mcp-protocol-version headers
  • jsonRpcVersion, jsonRpcRequestId, and mcpMethod request body fields
  • mcpParamsClientInfoName and mcpParamsClientInfoVersion request body fields when the method is initialize
  • mcpParamsToolName request body field when the method is tools/call