From 3204f44cde326a1a5d3ecfde6be47504cd71efca Mon Sep 17 00:00:00 2001 From: Joey Hafner Date: Tue, 4 Jun 2024 12:57:59 -0700 Subject: [PATCH] Init --- doradash/.dockerignore | 34 +++++ doradash/API.md | 25 ++++ doradash/Dockerfile | 51 ++++++++ doradash/README.Docker.md | 22 ++++ doradash/README.md | 2 + doradash/app/__pycache__/main.cpython-312.pyc | Bin 0 -> 5142 bytes .../main_test.cpython-312-pytest-8.1.1.pyc | Bin 0 -> 1038 bytes doradash/app/main.py | 120 ++++++++++++++++++ doradash/app/main_test.py | 18 +++ doradash/docker-compose.yaml | 49 +++++++ doradash/requirements.txt | 4 + doradash/test_deployments.sh | 6 + doradash/test_service_events.sh | 11 ++ 13 files changed, 342 insertions(+) create mode 100644 doradash/.dockerignore create mode 100644 doradash/API.md create mode 100644 doradash/Dockerfile create mode 100644 doradash/README.Docker.md create mode 100644 doradash/README.md create mode 100644 doradash/app/__pycache__/main.cpython-312.pyc create mode 100644 doradash/app/__pycache__/main_test.cpython-312-pytest-8.1.1.pyc create mode 100644 doradash/app/main.py create mode 100644 doradash/app/main_test.py create mode 100644 doradash/docker-compose.yaml create mode 100644 doradash/requirements.txt create mode 100755 doradash/test_deployments.sh create mode 100755 doradash/test_service_events.sh diff --git a/doradash/.dockerignore b/doradash/.dockerignore new file mode 100644 index 00000000..5b3d79a8 --- /dev/null +++ b/doradash/.dockerignore @@ -0,0 +1,34 @@ +# Include any files or directories that you don't want to be copied to your +# container here (e.g., local build artifacts, temporary files, etc.). 
+# +# For more help, visit the .dockerignore file reference guide at +# https://docs.docker.com/go/build-context-dockerignore/ + +**/.DS_Store +**/__pycache__ +**/.venv +**/.classpath +**/.dockerignore +**/.env +**/.git +**/.gitignore +**/.project +**/.settings +**/.toolstarget +**/.vs +**/.vscode +**/*.*proj.user +**/*.dbmdl +**/*.jfm +**/bin +**/charts +**/docker-compose* +**/compose* +**/Dockerfile* +**/node_modules +**/npm-debug.log +**/obj +**/secrets.dev.yaml +**/values.dev.yaml +LICENSE +README.md diff --git a/doradash/API.md b/doradash/API.md new file mode 100644 index 00000000..fb7a552e --- /dev/null +++ b/doradash/API.md @@ -0,0 +1,25 @@ +# Design doc for API + +Stuff we're gonna need: + +- GET for each of the four metrics, plus most recent "rating". +- POST for data-generating events: deployment, outage/restoration. + +Given that this service relies on having data *pushed* to it, we can only ever return metrics based on the most recent deployment or outage/restoration event. + +So with that in mind, we have the following design for each of our endpoints: + +| Method | Description | Endpoint | Request Payload | Response Payload | +|:------:|:-----------:|:--------:|:---------------:|:----------------:| +| GET | Get deployment frequency | /api/metrics/deployment_frequency | - | {"TIMESTAMP", "COUNT", "UNIT"} | +| GET | Get lead time for changes | /api/metrics/lead_time_for_changes | - | {"TIMESTAMP", "COMPUTED_TIME"} | +| GET | Get time to restore service | /api/metrics/time_to_restore_service | - | {"TIMESTAMP", "COMPUTED_TIME"} | +| GET | Get change failure rate | /api/metrics/change_failure_rate | - | {"TIMESTAMP", "RATE"} | +| GET | Get current rating | /api/metrics/vanity | - | {"TIMESTAMP", "RATING"} | +| POST | Post new deployment event | /api/events/deployment | {"TIMESTAMP", "{INCLUDED_GIT_HASHES}", "OLDEST_COMMIT_TIMESTAMP", "DEPLOY_RETURN_STATUS"} | OK | +| POST | Post new service availability change event | /api/events/service_availability | 
{"TIMESTAMP", "SERVICE_ID", "EVENT_TYPE"} | OK |

### Notes
- As-is, this API leaves no room for versioning, publisher IDs, or meaningful correlation between deployments and service availability changes.
- As-is, we have no identification, authentication, or authorization systems.
- As-is, we have no way to view the dataset from which the values are calculated.
diff --git a/doradash/Dockerfile b/doradash/Dockerfile
new file mode 100644
index 00000000..9b09b3b2
--- /dev/null
+++ b/doradash/Dockerfile
@@ -0,0 +1,51 @@
# syntax=docker/dockerfile:1

# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/

ARG PYTHON_VERSION=3.12.2
FROM python:${PYTHON_VERSION}-slim AS base

# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1

# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
ARG UID=10001
RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/sbin/nologin" \
    --no-create-home \
    --uid "${UID}" \
    appuser

# Download dependencies as a separate step to take advantage of Docker's caching.
# Leverage a cache mount to /root/.cache/pip to speed up subsequent builds.
# Leverage a bind mount to requirements.txt to avoid having to copy it into
# this layer.
RUN --mount=type=cache,target=/root/.cache/pip \
    --mount=type=bind,source=requirements.txt,target=requirements.txt \
    python -m pip install -r requirements.txt

# Switch to the non-privileged user to run the application.
USER appuser

# Copy the source code into the container.
COPY . .

# Expose the port that the application listens on.
EXPOSE 8000

# Run the application.
# Exec (JSON-array) form so uvicorn runs as PID 1 and receives SIGTERM from
# `docker stop` directly, instead of being wrapped in `/bin/sh -c`.
# NOTE(review): --reload-dir has no effect without --reload; it is kept for
# parity with the original command but should be dropped for production.
CMD ["uvicorn", "app.main:app", "--reload-dir", "/app", "--host", "0.0.0.0", "--port", "8000"]
diff --git a/doradash/README.Docker.md b/doradash/README.Docker.md
new file mode 100644
index 00000000..6dae561f
--- /dev/null
+++ b/doradash/README.Docker.md
@@ -0,0 +1,22 @@
### Building and running your application

When you're ready, start your application by running:
`docker compose up --build`.

Your application will be available at http://localhost:8000.

### Deploying your application to the cloud

First, build your image, e.g.: `docker build -t myapp .`.
If your cloud uses a different CPU architecture than your development
machine (e.g., you are on a Mac M1 and your cloud provider is amd64),
you'll want to build the image for that platform, e.g.:
`docker build --platform=linux/amd64 -t myapp .`.

Then, push it to your registry, e.g. `docker push myregistry.com/myapp`.

Consult Docker's [getting started](https://docs.docker.com/go/get-started-sharing/)
docs for more detail on building and pushing.

### References
* [Docker's Python guide](https://docs.docker.com/language/python/)
diff --git a/doradash/README.md b/doradash/README.md
new file mode 100644
index 00000000..1d03c7a1
--- /dev/null
+++ b/doradash/README.md
@@ -0,0 +1,2 @@
# Doradash - A simple API server for tracking DORA metrics
Configure your Git, CI/CD, and observability platforms to integrate with Doradash to get a readout of your DORA metrics: deployment frequency, lead time for changes, time to restore service, and change failure rate.
diff --git a/doradash/app/__pycache__/main.cpython-312.pyc b/doradash/app/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d311bf8d07abd3f9d29472e1ec0cbae74fb9c62 GIT binary patch literal 5142 zcmb_gO>o=R5qEyFWuoMG*qBsPIR5 zfLxQNS|xF(N~@VV=|P(Q%=p-hbMP_8+)Ji8D83-8&p1n?xZBmnIpKAQ>*farn-s z`HUms$Os96y*tv*j4R<{Z6V!~aVOlY?M!#dlAz=V^P+7{5KHRrSG`K+~9Z z81sUuv6(7u_L%m2#Gaw^Ino`rbZ3)v*K?%1Z0TT=bTIFXbbpPDi13D6k#$v1WmLn1 zR8iBq4Bai)WKExX<2A#7{pQU#uHH$i%X%u8jc|tNvaG4E=Rgo%0t+cMtw^`!bPDw3 z=ybAqZ+H<#K1Zw~9t&3Jd+k890y5s%@$qAsU%`HY&?r;UJm8=exHL(}EV zvf)~kwMA7kdh9MKmsWr&C3BffN;kR`He8~rzCyDSjM7)MB;RBU0orJe-9;dqM7OX- zNRg{AYn4~HRfk?T8d<_I&s;@Ek$-6ErF@aQLG+fohyw3_B7d>X^DY;0Ow0W+)6c@Q zb0?$o@j2!0>D6$25T4_!b8ki>;czq#WOQ~cdVan*Hai*}pO1v+PK9U3#^>jhqQy8h zJ32NWiNuRP34_RK6 z9}WX&3>o7faSA36PsSsbFen{AJvSL29R=yRkzo)!39Fxkl|+n`$VP4ZU__COUC;bS$)xqZwJB3`GRPq3P7% zscJZ(C%I^Ne=Mh0)T@-{C~}i&&EOQngQ_H5y`vlcl$Jv!fQVb_9SuDhg6rF3vBg|Q zjV1&)ADn+IMys7&>({^PI9xh<>C^U49i<~zDjio# zzN@>4qJ4^S5%qL4OoM-=~G+|n6 zZPFza`TckG)YGzr_7uwtal=E_Q zVgBx!;nSn5?n00WqYyJ@NL!T0Oh>2HEbRof6w(%7DLS>Da_4?VmVGgBL2@7fWpypY$C0(Ero}()1AM+~+!k8*|+W zXlz1=QMlV2ZEXad?}iEtB@iwm4x{D9N;0Wx8m53$dIdn=@>-T{voN(UzS)N+msLZ# zh0u^SaA-I*WDSL8j)sP;@k3x@j#)RtGn*PNGg4~k$ht|pZIkYstuT0yW%r4^2wF5u zv1{as*k0Ke0+PhgC1Wwh%7A+uUpZm9HjDydH01{BF}rqU@r|Gh;y zdrM{3G`yP0mr{!1wGwWAS)Go!D7uiMGwDl6a1}I+gqamF-DE{v8buO?Tf+kjRtz4g z2uG@Io$t*6xi@2qqfnt6B5f+h=F?(k*2bi7cpL4cott3*PLB8PC=iH(K+S8ZZk&1t zJd32B%QjnZ*LkMgO z-DRQse&Mk&R273IA;`8?2sQ7jZdkd=@`k_O%eMai+aeu!wTn^AR|N;Z2@TA=FND{3~G zPl6izvNKs5tYnj<$ZC@xAxFuOqaa%?HP>s_;3YeA+1l*Vu}E$MOp6d+|J#vw_&YoX z28Am?%t$(t>;P?CRk;5}8y>@rq|j6eHc(o}W1~uJV6*^H4T3&D{WUr$BG7Son1@rq z!h>SMLf4vz)TC4v3X;4D+~$bgdrQ@uVB7Ely$BjE!L9ug2t?&z&)W1(_aG!W=Vfl) z3lQEGUnq+&Y>Ow#;)#kl{HV1ozEbra+M3+%KU3~Mv)w;g?w_pmU#R#le%fC4U4HDl z_~g>m`t)}MKJ#6yc~I@1m-qr3cPnE5pOX(pDg$F>ajYbanaCO1hn%!3E3C|ypw^R; zEIMfiLuUszut{z`3mrI}&w~!YLV!WfTm=ZSt9+3x*=2h}pvW&_&UogkyO1Lr6X0Y_ 
zMeJ%gQpd#eOdmQIJ|+Tnj*j&VyNZsF?N5%nZUzJRoNLk4WG<^{5l;gQW?IAZOyfYH z7GB9PXqe#`_Xstxh@VCP0{D8=7(xthqsIsyXlc7FHfyOqgwcV5^a@P!eYiFJdIB7E zbglh>;fQy}07k*1+rd~l7<+VLd;De@{)0E6*WJ18K2&xyEJe%x(Z9P#YEBpcHS&?r z1F>>=_+#zS%avHXd^lbb`gX+DjeKeF?Bl?>ia1dcCYnQQJ=T~QkrvF#NTP5|_TdGS z0^Ym#fWVI%+$!E&<3K{Vfjk5V;13EK3jY=)I?8QyK8NpMd{>bu0)~!NvJr;r&TMlV zBLV~?3?T)W-8$Y@nM{YEi-wR)0)YaIop!8Npymr`X3YoRvnyFR`S4wD?a=Bl4?XV8 zE`58@T?g2MUI(cJ+}h89fU~-L)@G_mn8#esLF?e-k^gJzGx1#2*H-oSZ2O1H{^3WS z&-~}BzW%MZ%f8V2Gf(VR%?~4Mtpqx9j{@a^cv-wu5-u?(MY_!*L4XO3mIe6b0+Kw% z&`t4s!*DO>6*;S?k`zy!F;IUUSy;&?v5=F~jh`?yvv9-nVmKgOnROZ~GnmQFAwyWs zLBWUlnPn}*0mfj_jzN`~fbK6;zI9(Pi_2Fiegk00)&2#f=HNK)3ljc3+ zK|Ei&{O_gK9DClQ+@bq@djuYPmpKR5^}w@7;8F9DK>M0lZSP!beZmR9>3yfSeIQF9aCX$lQjxG47Zm15UXst=_`SdS^y^G;96gHljCa^n^k rTD1<;!MM>1H}aHsap#d6t6{4)U~^AaIH)eY+!U6iJgC7Y^U{9+53`Qk literal 0 HcmV?d00001 diff --git a/doradash/app/__pycache__/main_test.cpython-312-pytest-8.1.1.pyc b/doradash/app/__pycache__/main_test.cpython-312-pytest-8.1.1.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee4b2fd68a46ebdd5c88f46183a05c3a79687507 GIT binary patch literal 1038 zcmb7C&1(}u6rb5_Hk*E>U`y3jS_Gj{=_aYVNm+^qp|wIic#&L|ZDwjV?3X)}mPi9V z^q?n?Mii_lNNK@8#7m*A)TyB0$(tp3>B*UGwv~EvmU-_t@6G%A?R@I(?E-v#2EXx` z1i%k*h?mj08LgA4d#Z}5mgrSovr=nYGS(N}z^R3hd z;V3r;6&0@@iIfEp+N>{RM_=JX{K1-sy8xYpNWH~l6PvclJ z2yGrNF%^3#@HrL0tUac*O7SD)FrKD(Q_%RO@X>G;}x zu6DaF?#aV#9t5(zOkJqk4z)ePdTs{|^W=2F3>r}b79lvKw(m`ZcsaBwX6X|XOU1S> z9vro4g`s!{5kaA7k0m%=EaCafVMaJa+Z!Mp6!;t$N2=lasP5qDaFid#qapXW_?3VV zegj|)?14)Mp!)!14i#{IxMj48Z;Y45tLdGgtNWSKcHh*O(aY<`dhvtt)_6C)Gg{am TxpJ6J4oKUezulLFBT=zG7)1nu literal 0 HcmV?d00001 diff --git a/doradash/app/main.py b/doradash/app/main.py new file mode 100644 index 00000000..ba613a6b --- /dev/null +++ b/doradash/app/main.py @@ -0,0 +1,120 @@ +from datetime import datetime, timedelta +from fastapi import FastAPI, HTTPException +from pydantic import BaseModel +from pydantic.functional_validators import field_validator +import 
re

app = FastAPI()


class Deployment(BaseModel):
    """A deployment event pushed by a CI/CD pipeline.

    All fields are optional at the model level (default None) so partial
    payloads surface as per-field validation errors rather than parse failures.
    """

    event_timestamp: datetime = None  # should look like 2024-03-12T14:29:46-0700
    hashes: list = None  # each entry must match the sha1 regex \b[0-9a-f]{5,40}\b
    timestamp_oldest_commit: datetime = None  # should look like 2024-03-12T14:29:46-0700
    deploy_return_status: str = None  # one of "Success", "Failure", or "Invalid"

    @field_validator("event_timestamp", "timestamp_oldest_commit")
    def validate_datetime(cls, d):
        """Belt-and-braces ISO-8601 shape check on the already-parsed datetime.

        Pydantic has coerced `d` to a datetime before this runs; we re-check
        its text form so non-ISO inputs that pydantic happens to accept are
        still rejected. Raises ValueError on mismatch.
        """
        date_text = str(d)
        iso8601_regex = r"^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24\:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$"
        if re.match(iso8601_regex, date_text):
            return d
        raise ValueError(f"date must be in ISO-8601 format: {d}")

    @field_validator("hashes")
    def validate_hashes(cls, hashes):
        """Require a non-empty list where EVERY entry is a valid sha1 prefix.

        BUG FIX: the original returned from inside the loop after the first
        valid hash, so only hashes[0] was ever validated.
        """
        if not hashes:
            raise ValueError("commit hash list cannot be empty")
        for h in hashes:
            if not re.match(r"\b[0-9a-f]{5,40}\b", h):
                raise ValueError(f"hash not valid sha1: {h}")
        return hashes

    @field_validator("deploy_return_status")
    def validate_return_status(cls, status):
        """Restrict the deploy outcome to the three recognized states."""
        if status not in ("Success", "Failure", "Invalid"):
            raise ValueError(f"return_status must be one of \"Success\", \"Failure\", or \"Invalid\": {status}")
        return status


class ServiceAvailabilityChange(BaseModel):
    event_timestamp: datetime  # should look like 2024-03-12T14:29:46-0700
    service_id: str  # practically arbitrary, but maybe useful for later
    event_type: str  # should be "outage" or "restoration"

    @field_validator("event_type")
    def validate_balanced_events(cls, event_type):
        """Reject a restoration that has no unmatched preceding outage.

        Events are validated one at a time (before being appended to
        `service_events`), so a simple count of already-stored outages vs.
        restorations suffices.

        BUG FIX: the original never returned the value (pydantic then stored
        None for the field) and compared a stored event OBJECT against the
        string 'outage', which is always unequal — any stored restoration
        made every subsequent restoration fail.
        """
        if event_type not in ("outage", "restoration"):
            raise ValueError(f"event_type must be \"outage\" or \"restoration\": {event_type}")
        if event_type == "restoration":
            outages = sum(1 for e in service_events if e.event_type == "outage")
            restorations = sum(1 for e in service_events if e.event_type == "restoration")
            # Accepting this restoration must not make restorations outnumber outages.
            if restorations >= outages:
                raise ValueError("no preceding outage for restoration event")
        return event_type


# please replace "store the dataset in an array in memory" before deploying
deployments = []
service_events = []


@app.post("/api/events/deployment")
def append_deployment(deployment: Deployment):
    """Record a deployment event and echo it back."""
    deployments.append(deployment)
    return deployment


@app.post("/api/events/service_availability")
def append_service_availability(service_event: ServiceAvailabilityChange):
    """Record an outage/restoration event and echo it back."""
    service_events.append(service_event)
    return service_event


@app.get("/api/metrics/deployment_frequency")
def get_deployment_frequency():
    """Average number of deployments per day, over days with >= 1 deployment.

    BUG FIX: guard the empty case — the original raised ZeroDivisionError
    (an HTTP 500) when no deployment events had been posted yet.
    """
    if not deployments:
        return 0.0
    deploys_in_day = {}
    for deployment in deployments:
        day = deployment.event_timestamp.date()
        deploys_in_day[day] = deploys_in_day.get(day, 0) + 1
    return len(deployments) / len(deploys_in_day)


@app.get("/api/metrics/lead_time_for_changes")
def get_lead_time_for_changes():
    """Mean (event_timestamp - timestamp_oldest_commit), as str(timedelta).

    BUG FIX: the original used `timedelta.seconds`, which is only the
    seconds-within-a-day component and silently drops whole days; use
    total_seconds() instead. Also guards the empty-dataset division.
    """
    if not deployments:
        return str(timedelta(0))
    time_deltas = []
    for deployment in deployments:
        time_delta = deployment.event_timestamp - deployment.timestamp_oldest_commit
        time_deltas.append(time_delta.total_seconds())
    lead_time_for_changes = sum(time_deltas) / len(time_deltas)
    return str(timedelta(seconds=lead_time_for_changes))  # standardize output format?
+ +@app.get("/api/metrics/time_to_restore_service") +def get_time_to_restore_service(): + # check for balanced events (a preceding outage for each restoration) + # for each balanced root-level event, get the time delta from first outage to final restoration + # append time delta to array of time deltas + # return average of time deltas array + + + +@app.get("/api/metrics/change_failure_rate") +def get_change_failure_rate(): + success_counter = 0 + failure_counter = 0 + for deployment in deployments: + if deployment.deploy_return_status == "Invalid": + pass + elif deployment.deploy_return_status == "Success": + success_counter += 1 + else: + failure_counter += 1 + return failure_counter / (success_counter + failure_counter) + +# @app.get("/api/metrics/vanity") +# def get_vanity(): diff --git a/doradash/app/main_test.py b/doradash/app/main_test.py new file mode 100644 index 00000000..b5d3e329 --- /dev/null +++ b/doradash/app/main_test.py @@ -0,0 +1,18 @@ +from datetime import datetime +import json +import requests + +valid_deployment = { + "event_timestamp": str(datetime.now()), + "hashes": ["d7d8937e8f169727852dea77bae30a8749fe21fc"], + "oldest_commit_timestamp": str(datetime.now()), + "deploy_return_status": "Success" +} + +def test_valid_deployment(): + #payload = + endpoint = "http://127.0.0.1:8000/api/events/deployment" + response = requests.post(endpoint, json=json.dumps(valid_deployment)) + print(response) + print(valid_deployment) + #assert response.status_code == 200 \ No newline at end of file diff --git a/doradash/docker-compose.yaml b/doradash/docker-compose.yaml new file mode 100644 index 00000000..b7e49e2b --- /dev/null +++ b/doradash/docker-compose.yaml @@ -0,0 +1,49 @@ +# Comments are provided throughout this file to help you get started. +# If you need more help, visit the Docker Compose reference guide at +# https://docs.docker.com/go/compose-spec-reference/ + +# Here the instructions define your application as a service called "server". 
# This service is built from the Dockerfile in the current directory.
# You can add other services your application may depend on here, such as a
# database or a cache. For examples, see the Awesome Compose repository:
# https://github.com/docker/awesome-compose
services:
  doradash:
    build:
      context: .
    ports:
      - 8000:8000

# The commented out section below is an example of how to define a PostgreSQL
# database that your application can use. `depends_on` tells Docker Compose to
# start the database before your application. The `db-data` volume persists the
# database data between container restarts. The `db-password` secret is used
# to set the database password. You must create `db/password.txt` and add
# a password of your choosing to it before running `docker compose up`.
#     depends_on:
#       db:
#         condition: service_healthy
#   db:
#     image: postgres
#     restart: always
#     user: postgres
#     secrets:
#       - db-password
#     volumes:
#       - db-data:/var/lib/postgresql/data
#     environment:
#       - POSTGRES_DB=example
#       - POSTGRES_PASSWORD_FILE=/run/secrets/db-password
#     expose:
#       - 5432
#     healthcheck:
#       test: [ "CMD", "pg_isready" ]
#       interval: 10s
#       timeout: 5s
#       retries: 5
# volumes:
#   db-data:
# secrets:
#   db-password:
#     file: db/password.txt

diff --git a/doradash/requirements.txt b/doradash/requirements.txt
new file mode 100644
index 00000000..598a7394
--- /dev/null
+++ b/doradash/requirements.txt
@@ -0,0 +1,4 @@
uvicorn==0.28.0
fastapi==0.110.0
pydantic==2.6.4
pytest==8.1.1
# BUG FIX: app/main_test.py imports requests, which was missing here.
requests==2.31.0
diff --git a/doradash/test_deployments.sh b/doradash/test_deployments.sh
new file mode 100755
index 00000000..2b7c1d74
--- /dev/null
+++ b/doradash/test_deployments.sh
@@ -0,0 +1,6 @@
#!/bin/bash
# Smoke test: POST a handful of deployment events against a locally running server.
curl -X 'POST' 'http://127.0.0.1:8000/api/events/deployment' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T22:02:38.689Z","hashes": ["6ece311c24dd6a4b3dbbf8525a3a61854a32838d","d7d8937e8f169727852dea77bae30a8749fe21fc"],"timestamp_oldest_commit": "2024-03-11T22:02:38.689Z","deploy_return_status": "Failure"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/deployment' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T23:03:38.689Z","hashes": ["f5521851965c4866c5dc0e8edc9d5e2a40b5ebe6","b8c3bb11a978dbcbe507c53c62f715a728cdfd52"],"timestamp_oldest_commit": "2024-03-10T22:05:38.689Z","deploy_return_status": "Success"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/deployment' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T21:03:38.689Z","hashes": ["ae35c9c0e4f71ddf280bd297c42f04f2c0ce3838","d53e974d7e60295ed36c38a57870d1a6bfc7e399"],"timestamp_oldest_commit": "2024-03-11T20:05:38.689Z","deploy_return_status": "Success"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/deployment' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-10T23:03:38.689Z","hashes": ["b6a707faa68bc987ae549c0f36d053a412bd40da","b6a707faa68bc987ae549c0f36d053a412bd40da"],"timestamp_oldest_commit": "2024-03-10T14:05:38.689Z","deploy_return_status": "Success"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/deployment' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-02-10T23:03:38.689Z","hashes": ["94036270dd329559b58edc6f8780e03bd94509a3","b6d1abc911c08778424fb244de1f172f54905b81"],"timestamp_oldest_commit": "2024-02-09T14:05:38.689Z","deploy_return_status": "Invalid"}'
diff --git a/doradash/test_service_events.sh b/doradash/test_service_events.sh
new file mode 100755
index 00000000..db1540ab
--- /dev/null
+++ b/doradash/test_service_events.sh
@@ -0,0 +1,11 @@
#!/bin/bash
# Smoke test: POST balanced outage/restoration pairs against a locally running server.
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T18:02:00.000Z","service_id": "plex","event_type": "outage"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T19:02:00.000Z","service_id": "plex","event_type": "restoration"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T20:02:00.000Z","service_id": "nextcloud","event_type": "outage"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T21:02:00.000Z","service_id": "nextcloud","event_type": "restoration"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-11T22:02:00.000Z","service_id": "nextcloud","event_type": "outage"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T01:02:00.000Z","service_id": "nextcloud","event_type": "restoration"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T02:02:00.000Z","service_id": "plex","event_type": "outage"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T03:02:00.000Z","service_id": "plex","event_type": "restoration"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T04:02:00.000Z","service_id": "plex","event_type": "outage"}'
curl -X 'POST' 'http://127.0.0.1:8000/api/events/service_availability' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"event_timestamp": "2024-03-12T04:02:00.000Z","service_id": "plex","event_type": "restoration"}'