diff --git a/Makefile b/Makefile
index b94b0b4..7fa06e8 100644
--- a/Makefile
+++ b/Makefile
@@ -5,4 +5,4 @@ format:
 .PHONY: dev-start
 dev-start:
 	@mkdir -p ./targets
-	poetry run python discoverecs.py --directory $$PWD/targets
+	poetry run python discoverecs.py --directory $$PWD/targets --default-scrape-interval-prefix default
diff --git a/README.md b/README.md
index f98c2bd..bdabe59 100644
--- a/README.md
+++ b/README.md
@@ -60,8 +60,7 @@ The output directory is then `/opt/prometheus-ecs` defined in your Prometheus co
         regex: (.+)
 ```
 
-You can also specify a discovery interval with `--interval` (in seconds). Default is 60s. We also provide caching to minimize hitting query
-rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.
+You can also specify a discovery interval with `--interval` (in seconds). The default is `60s`. We also provide caching to minimize hitting query rate limits with the AWS ECS API. `discoverecs.py` runs in a loop until interrupted and will output target information to stdout.
 
 To make your application discoverable by Prometheus, you need to set the following environment variable in your task definition:
 
@@ -72,9 +71,11 @@ To make your application discoverable by Prometheus, you need to set the followi
 Metric path and scrape interval is supported via `PROMETHEUS_ENDPOINT`:
 
 ```text
-"interval:/metric_path,..."
+"[interval:]<metric_path>,..."
 ```
 
+where `interval` is optional.
+
 Examples:
 
 ```text
@@ -91,7 +92,23 @@ Under ECS task definition (`task.json`):
 Available scrape intervals: `15s`, `30s`, `1m`, `5m`.
 
-The default metric path is `/metrics`. The default scrape interval is `1m`.
+The default metric path is `/metrics`.
+
+### Default scrape interval
+
+The default scrape interval is `1m` when no interval is specified in the `PROMETHEUS_ENDPOINT` variable.
+
+This can be customised with the `--default-scrape-interval-prefix` option. The prefix can be any string; targets that do not specify an interval are written to `/opt/prometheus-ecs/<prefix>-tasks.json`.
+
+For example, if `default` is used:
+
+```shell
+--default-scrape-interval-prefix default
+```
+
+then `/opt/prometheus-ecs/default-tasks.json` will be written. This is useful when you want to set the default scrape interval in your Prometheus configuration, rather than having to update and redeploy this discovery service.
+
+### Configuration yaml
 
 The following Prometheus configuration should be used to support all available intervals:
 
diff --git a/discoverecs.py b/discoverecs.py
index 939e843..c5c3551 100644
--- a/discoverecs.py
+++ b/discoverecs.py
@@ -341,11 +341,11 @@ def extract_path_interval(env_variable):
                 if re.search("(15s|30s|1m|5m)", pi[0]):
                     path_interval[pi[1]] = pi[0]
                 else:
-                    path_interval[pi[1]] = "1m"
+                    path_interval[pi[1]] = None
             else:
-                path_interval[lst] = "1m"
+                path_interval[lst] = None
     else:
-        path_interval["/metrics"] = "1m"
+        path_interval["/metrics"] = None
     return path_interval
 
 
@@ -457,14 +457,15 @@ def task_info_to_targets(task_info):
 
 
 class Main:
-    def __init__(self, directory, interval):
+    def __init__(self, directory, interval, default_scrape_interval_prefix):
         self.directory = directory
         self.interval = interval
+        self.default_scrape_interval_prefix = default_scrape_interval_prefix
         self.discoverer = TaskInfoDiscoverer()
 
     def write_jobs(self, jobs):
-        for i, j in jobs.items():
-            file_name = self.directory + "/" + i + "-tasks.json"
+        for prefix, j in jobs.items():
+            file_name = self.directory + "/" + prefix + "-tasks.json"
             tmp_file_name = file_name + ".tmp"
             with open(tmp_file_name, "w") as f:
                 f.write(json.dumps(j, indent=4))
@@ -511,7 +512,7 @@ def discover_tasks(self):
                 }
                 if labels:
                     job["labels"].update(labels)
-                jobs[interval].append(job)
+                jobs[interval or self.default_scrape_interval_prefix].append(job)
                 log(job)
         self.write_jobs(jobs)
 
@@ -525,15 +526,20 @@ def main():
     arg_parser = argparse.ArgumentParser()
     arg_parser.add_argument("--directory", required=True)
    arg_parser.add_argument("--interval", default=60)
+    arg_parser.add_argument("--default-scrape-interval-prefix", default="1m")
     args = arg_parser.parse_args()
     log(
-        "Starting. Directory: "
-        + args.directory
-        + ". Interval: "
-        + str(args.interval)
-        + "s."
+        'Starting...\nDirectory: "{}"\nRefresh interval: "{}s"\nDefault scrape interval prefix: "{}"\n'.format(
+            args.directory,
+            str(args.interval),
+            args.default_scrape_interval_prefix,
+        )
     )
-    Main(args.directory, float(args.interval)).loop()
+    Main(
+        directory=args.directory,
+        interval=float(args.interval),
+        default_scrape_interval_prefix=args.default_scrape_interval_prefix,
+    ).loop()
 
 
 if __name__ == "__main__":
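For reference, the Prometheus job that consumes the new default-prefix file could look like the minimal sketch below. This is an illustration only, not part of the diff: it assumes the service is started with `--default-scrape-interval-prefix default` and writes to the `/opt/prometheus-ecs` directory described in the README; the job name `ecs-default` and the `1m` value are placeholders, and any relabelling used by the existing per-interval jobs would be added in the same way.

```yaml
# Sketch of a Prometheus scrape job for targets that did not declare an
# interval in PROMETHEUS_ENDPOINT. Assumes --default-scrape-interval-prefix
# is set to "default"; the job name is illustrative only.
scrape_configs:
  - job_name: ecs-default
    scrape_interval: 1m          # the effective default scrape interval
    file_sd_configs:
      - files:
          - /opt/prometheus-ecs/default-tasks.json
```

With a job like this, changing the default scrape interval becomes a Prometheus configuration change and reload rather than a redeploy of the discovery service.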