commit 4c53a1985148de64c514d7ec95183f016f88a92d Author: Vyacheslav Boyko Date: Mon Sep 2 22:47:30 2024 +0300 logs analyzer with log preparer diff --git a/logs-preparer/test.sh b/logs-preparer/test.sh new file mode 100644 index 0000000..6c4f675 --- /dev/null +++ b/logs-preparer/test.sh @@ -0,0 +1,5 @@ +STRLENGTH=$(echo -n $1 | wc -m) +curl -v --location 'http://localhost:8082' \ + --header 'Content-Type: application/json' \ + --header "Content-Length: $STRLENGTH" \ + --data "$1" diff --git a/logs-preparer/vector.yaml b/logs-preparer/vector.yaml new file mode 100644 index 0000000..11b0c1b --- /dev/null +++ b/logs-preparer/vector.yaml @@ -0,0 +1,53 @@ +# Set global options +data_dir: "/var/lib/vector" + +# Vector's API (disabled by default) +# Enable and try it out with the `vector top` command +# NOTE: this is _enabled_ for helm chart deployments, see: https://github.com/vectordotdev/helm-charts/blob/develop/charts/vector/examples/datadog-values.yaml#L78-L81 +api: + enabled: true + address: "127.0.0.1:8687" + +sources: + from_ampq: + type: amqp + connection_string: amqp://rmuser:${RABBITMQ_PASS}@${RABBITMQ_HOST}:5672/%2f?timeout=10 + queue: logs + decoding: + codec: json + +transforms: + transform_message: + type: remap + inputs: + - from_ampq + source: >- + if (exists(.app)) { + . 
= push([], .app) + } + + log(., level:"info") + +sinks: + to_stdout: + type: console + inputs: + - transform_message + encoding: + codec: json + json: + pretty: true + to_signoz: + type: http + inputs: + - transform_message + uri: http://localhost:8082 + method: post + encoding: + codec: json + request: + headers: + Content-Type: application/json + concurrency: 10 + retry_attempts: 3 + retry_backoff_secs: 3 diff --git a/signoz/.dockerignore b/signoz/.dockerignore new file mode 100644 index 0000000..028b1e4 --- /dev/null +++ b/signoz/.dockerignore @@ -0,0 +1,6 @@ +.git +.github +.vscode +README.md +deploy +sample-apps \ No newline at end of file diff --git a/signoz/.editorconfig b/signoz/.editorconfig new file mode 100644 index 0000000..81ed7f2 --- /dev/null +++ b/signoz/.editorconfig @@ -0,0 +1,33 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true + +# Matches multiple files with brace expansion notation +# Set default charset +[*.{js,py}] +charset = utf-8 + +# 4 space indentation +[*.py] +indent_style = space +indent_size = 4 + +# Tab indentation (no size specified) +[Makefile] +indent_style = tab + +# Indentation override for all JS under lib directory +[lib/**.js] +indent_style = space +indent_size = 2 + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 \ No newline at end of file diff --git a/signoz/.gitattributes b/signoz/.gitattributes new file mode 100644 index 0000000..8c64395 --- /dev/null +++ b/signoz/.gitattributes @@ -0,0 +1 @@ +*.css linguist-detectable=false \ No newline at end of file diff --git a/signoz/.gitignore b/signoz/.gitignore new file mode 100644 index 0000000..8fe54dc --- /dev/null +++ b/signoz/.gitignore @@ -0,0 +1,69 @@ + +node_modules + +deploy/docker/environment_tiny/common_test 
+frontend/node_modules +frontend/.pnp +frontend/i18n-translations-hash.json +*.pnp.js + +# testing +frontend/coverage + +# production +frontend/build +frontend/.vscode +frontend/.yarnclean +frontend/.temp_cache +frontend/test-results + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +frontend/npm-debug.log* +frontend/yarn-debug.log* +frontend/yarn-error.log* +frontend/src/constants/env.ts + +.idea + +**/.vscode +**/build +**/storage +**/locust-scripts/__pycache__/ +**/__debug_bin + +.env +pkg/query-service/signoz.db + +pkg/query-service/tests/test-deploy/data/ + +ee/query-service/signoz.db + +ee/query-service/tests/test-deploy/data/ + +# local data +*.backup +*.db +/deploy/docker/clickhouse-setup/data/ +/deploy/docker-swarm/clickhouse-setup/data/ +bin/ + +*/query-service/queries.active + +# e2e + +e2e/node_modules/ +e2e/test-results/ +e2e/playwright-report/ +e2e/blob-report/ +e2e/playwright/.cache/ +e2e/.auth + +# go +vendor/ +**/main/** diff --git a/signoz/.gitpod.yml b/signoz/.gitpod.yml new file mode 100644 index 0000000..1771de8 --- /dev/null +++ b/signoz/.gitpod.yml @@ -0,0 +1,36 @@ +# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file) +# and commit this file to your remote git repository to share the goodness with others. 
+ + +tasks: + - name: Run Script to Comment ut required lines + init: | + cd ./.scripts + sh commentLinesForSetup.sh + + - name: Run Docker Images + init: | + cd ./deploy + sudo docker-compose -f docker/clickhouse-setup/docker-compose.yaml up -d + # command: + + - name: Run Frontend + init: | + cd ./frontend + yarn install + command: + yarn dev + +ports: + - port: 3301 + onOpen: open-browser + - port: 8080 + onOpen: ignore + - port: 9000 + onOpen: ignore + - port: 8123 + onOpen: ignore + - port: 8089 + onOpen: ignore + - port: 9093 + onOpen: ignore diff --git a/signoz/.scripts/commentLinesForSetup.sh b/signoz/.scripts/commentLinesForSetup.sh new file mode 100644 index 0000000..c0dfd40 --- /dev/null +++ b/signoz/.scripts/commentLinesForSetup.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +# It Comments out the Line Query-Service & Frontend Section of deploy/docker/clickhouse-setup/docker-compose.yaml +# Update the Line Numbers when deploy/docker/clickhouse-setup/docker-compose.yaml chnages. +# Docs Ref.: https://github.com/SigNoz/signoz/blob/main/CONTRIBUTING.md#contribute-to-frontend-with-docker-installation-of-signoz + +sed -i 38,62's/.*/# &/' .././deploy/docker/clickhouse-setup/docker-compose.yaml diff --git a/signoz/CHANGELOG.md b/signoz/CHANGELOG.md new file mode 100644 index 0000000..e69de29 diff --git a/signoz/CODE_OF_CONDUCT.md b/signoz/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..245ad5f --- /dev/null +++ b/signoz/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at dev@signoz.io. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/signoz/CONTRIBUTING.md b/signoz/CONTRIBUTING.md new file mode 100644 index 0000000..cc1c439 --- /dev/null +++ b/signoz/CONTRIBUTING.md @@ -0,0 +1,385 @@ +# Contributing Guidelines + +## Welcome to SigNoz Contributing section 🎉 + +Hi there! We're thrilled that you'd like to contribute to this project, thank you for your interest. Whether it's a bug report, new feature, correction, or additional documentation, we greatly value feedback and contributions from our community. + +Please read through this document before submitting any issues or pull requests to ensure we have all the necessary information to effectively respond to your bug report or contribution. 
+ +- We accept contributions made to the [SigNoz `develop` branch]() +- Find all SigNoz Docker Hub images here + - [signoz/frontend](https://hub.docker.com/r/signoz/frontend) + - [signoz/query-service](https://hub.docker.com/r/signoz/query-service) + - [signoz/otelcontribcol](https://hub.docker.com/r/signoz/otelcontribcol) + +## Finding contributions to work on 💬 + +Looking at the existing issues is a great way to find something to contribute on. +Also, have a look at these [good first issues label](https://github.com/SigNoz/signoz/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) to start with. + + +## Sections: +- [General Instructions](#1-general-instructions-) + - [For Creating Issue(s)](#11-for-creating-issues) + - [For Pull Requests(s)](#12-for-pull-requests) +- [How to Contribute](#2-how-to-contribute-%EF%B8%8F) +- [Develop Frontend](#3-develop-frontend-) + - [Contribute to Frontend with Docker installation of SigNoz](#31-contribute-to-frontend-with-docker-installation-of-signoz) + - [Contribute to Frontend without installing SigNoz backend](#32-contribute-to-frontend-without-installing-signoz-backend) +- [Contribute to Backend (Query-Service)](#4-contribute-to-backend-query-service-) + - [To run ClickHouse setup](#41-to-run-clickhouse-setup-recommended-for-local-development) +- [Contribute to SigNoz Helm Chart](#5-contribute-to-signoz-helm-chart-) + - [To run helm chart for local development](#51-to-run-helm-chart-for-local-development) +- [Other Ways to Contribute](#other-ways-to-contribute) + +# 1. General Instructions 📝 + +## 1.1 For Creating Issue(s) +Before making any significant changes and before filing a new issue, please check [existing open](https://github.com/SigNoz/signoz/issues?q=is%3Aopen+is%3Aissue), or [recently closed](https://github.com/SigNoz/signoz/issues?q=is%3Aissue+is%3Aclosed) issues to make sure somebody else hasn't already reported the issue. Please try to include as much information as you can. 
+ +**Issue Types** - [Bug Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) | [Feature Request](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) | [Performance Issue Report](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=performance-issue-report.md&title=) | [Report a Security Vulnerability](https://github.com/SigNoz/signoz/security/policy) + +#### Details like these are incredibly useful: + +- **Requirement** - what kind of use case are you trying to solve? +- **Proposal** - what do you suggest to solve the problem or improve the existing + situation? +- Any open questions to address❓ + +#### If you are reporting a bug, details like these are incredibly useful: + +- A reproducible test case or series of steps. +- The version of our code being used. +- Any modifications you've made relevant to the bug🐞. +- Anything unusual about your environment or deployment. + +Discussing your proposed changes ahead of time will make the contribution +process smooth for everyone 🙌. + + **[`^top^`](#)** + +
+ +## 1.2 For Pull Request(s) + +Contributions via pull requests are much appreciated. Once the approach is agreed upon ✅, make your changes and open a Pull Request(s). +Before sending us a pull request, please ensure that, + +- Fork the SigNoz repo on GitHub, clone it on your machine. +- Create a branch with your changes. +- You are working against the latest source on the `develop` branch. +- Modify the source; please focus only on the specific change you are contributing. +- Ensure local tests pass. +- Commit to your fork using clear commit messages. +- Send us a pull request, answering any default questions in the pull request interface. +- Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation +- Once you've pushed your commits to GitHub, make sure that your branch can be auto-merged (there are no merge conflicts). If not, on your computer, merge main into your branch, resolve any merge conflicts, make sure everything still runs correctly and passes all the tests, and then push up those changes. +- Once the change has been approved and merged, we will inform you in a comment. + + +GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and +[creating a pull request](https://help.github.com/articles/creating-a-pull-request/). + +**Note:** Unless your change is small, **please** consider submitting different Pull Request(s): + +* 1️⃣ First PR should include the overall structure of the new component: + * Readme, configuration, interfaces or base classes, etc... + * This PR is usually trivial to review, so the size limit does not apply to + it. +* 2️⃣ Second PR should include the concrete implementation of the component. If the + size of this PR is larger than the recommended size, consider **splitting** ⚔️ it into + multiple PRs. +* If there are multiple sub-component then ideally each one should be implemented as + a **separate** pull request. 
+* Last PR should include changes to **any user-facing documentation.** And should include + end-to-end tests if applicable. The component must be enabled + only after sufficient testing, and there is enough confidence in the + stability and quality of the component. + + +You can always reach out to `ankit@signoz.io` to understand more about the repo and product. We are very responsive over email and [SLACK](https://signoz.io/slack). + +### Pointers: +- If you find any **bugs** → please create an [**issue.**](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=bug_report.md&title=) +- If you find anything **missing** in documentation → you can create an issue with the label **`documentation`**. +- If you want to build any **new feature** → please create an [issue with the label **`enhancement`**.](https://github.com/SigNoz/signoz/issues/new?assignees=&labels=&template=feature_request.md&title=) +- If you want to **discuss** something about the product, start a new [**discussion**.](https://github.com/SigNoz/signoz/discussions) + +
+ +### Conventions to follow when submitting Commits and Pull Request(s). + +We try to follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/), more specifically the commits and PRs **should have type specifiers** prefixed in the name. [This](https://www.conventionalcommits.org/en/v1.0.0/#specification) should give you a better idea. + +e.g. If you are submitting a fix for an issue in frontend, the PR name should be prefixed with **`fix(FE):`** + +- Follow [GitHub Flow](https://guides.github.com/introduction/flow/) guidelines for your contribution flows. + +- Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :) + + **[`^top^`](#)** + +
+ +# 2. How to Contribute 🙋🏻‍♂️ + +#### There are primarily 2 areas in which you can contribute to SigNoz + +- [**Frontend**](#3-develop-frontend-) (Written in Typescript, React) +- [**Backend**](#4-contribute-to-backend-query-service-) (Query Service, written in Go) + +Depending upon your area of expertise & interest, you can choose one or more to contribute. Below are detailed instructions to contribute in each area. + +**Please note:** If you want to work on an issue, please ask the maintainers to assign the issue to you before starting work on it. This would help us understand who is working on an issue and prevent duplicate work. 🙏🏻 + +⚠️ If you just raise a PR, without the corresponding issue being assigned to you - it may not be accepted. + + **[`^top^`](#)** + +
+ +# 3. Develop Frontend 🌚 + +**Need to Update: [https://github.com/SigNoz/signoz/tree/develop/frontend](https://github.com/SigNoz/signoz/tree/develop/frontend)** + +Also, have a look at [Frontend README.md](https://github.com/SigNoz/signoz/blob/develop/frontend/README.md) sections for more info on how to setup SigNoz frontend locally (with and without Docker). + +## 3.1 Contribute to Frontend with Docker installation of SigNoz + +- Clone the SigNoz repository and cd into signoz directory, + ``` + git clone https://github.com/SigNoz/signoz.git && cd signoz + ``` +- Comment out `frontend` service section at [`deploy/docker/clickhouse-setup/docker-compose.yaml#L68`](https://github.com/SigNoz/signoz/blob/develop/deploy/docker/clickhouse-setup/docker-compose.yaml#L68) + +![develop-frontend](https://user-images.githubusercontent.com/52788043/179009217-6692616b-17dc-4d27-b587-9d007098d739.jpeg) + + +- run `cd deploy` to move to deploy directory, +- Install signoz locally **without** the frontend, + - Add / Uncomment the below configuration to query-service section at [`deploy/docker/clickhouse-setup/docker-compose.yaml#L47`](https://github.com/SigNoz/signoz/blob/develop/deploy/docker/clickhouse-setup/docker-compose.yaml#L47) + ``` + ports: + - "8080:8080" + ``` +query service + + - Next run, + ``` + sudo docker-compose -f docker/clickhouse-setup/docker-compose.yaml up -d + ``` +- `cd ../frontend` and change baseURL in file [`frontend/src/constants/env.ts#L2`](https://github.com/SigNoz/signoz/blob/develop/frontend/src/constants/env.ts#L2) and for that, you need to create a `.env` file in the `frontend` directory with the following environment variable (`FRONTEND_API_ENDPOINT`) matching your configuration. 
+ + If you have backend api exposed via frontend nginx: + ``` + FRONTEND_API_ENDPOINT=http://localhost:3301 + ``` + If not: + ``` + FRONTEND_API_ENDPOINT=http://localhost:8080 + ``` + +- Next, + ``` + yarn install + yarn dev + ``` + +### Important Notes: +The Maintainers / Contributors who will change Line Numbers of `Frontend` & `Query-Section`, please update line numbers in [`/.scripts/commentLinesForSetup.sh`](https://github.com/SigNoz/signoz/blob/develop/.scripts/commentLinesForSetup.sh) + + **[`^top^`](#)** + +## 3.2 Contribute to Frontend without installing SigNoz backend + +If you don't want to install the SigNoz backend just for doing frontend development, we can provide you with test environments that you can use as the backend. + +- Clone the SigNoz repository and cd into signoz/frontend directory, + ``` + git clone https://github.com/SigNoz/signoz.git && cd signoz/frontend + ```` +- Create a file `.env` in the `frontend` directory with `FRONTEND_API_ENDPOINT=` +- Next, + ``` + yarn install + yarn dev + ``` + +Please ping us in the [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) channel or ask `@Prashant Shahi` in our [Slack Community](https://signoz.io/slack) and we will DM you with ``. + +**Frontend should now be accessible at** [`http://localhost:3301/services`](http://localhost:3301/services) + + **[`^top^`](#)** + +
+ +# 4. Contribute to Backend (Query-Service) 🌑 + +**Need to Update: [https://github.com/SigNoz/signoz/tree/develop/pkg/query-service](https://github.com/SigNoz/signoz/tree/develop/pkg/query-service)** + +## 4.1 Prerequisites + +### 4.1.1 Install SQLite3 + +- Run `sqlite3` command to check if you already have SQLite3 installed on your machine. + +- If not installed already, Install using below command + - on Linux + - on Debian / Ubuntu + ``` + sudo apt install sqlite3 + ``` + - on CentOS / Fedora / RedHat + ``` + sudo yum install sqlite3 + ``` + +## 4.2 To run ClickHouse setup (recommended for local development) + +- Clone the SigNoz repository and cd into signoz directory, + ``` + git clone https://github.com/SigNoz/signoz.git && cd signoz + ``` +- run `sudo make dev-setup` to configure local setup to run query-service, +- Comment out `frontend` service section at [`deploy/docker/clickhouse-setup/docker-compose.yaml#L68`](https://github.com/SigNoz/signoz/blob/develop/deploy/docker/clickhouse-setup/docker-compose.yaml#L68) +develop-frontend + +- Comment out `query-service` section at [`deploy/docker/clickhouse-setup/docker-compose.yaml#L41`,](https://github.com/SigNoz/signoz/blob/develop/deploy/docker/clickhouse-setup/docker-compose.yaml#L41) +Screenshot 2022-07-14 at 22 48 07 + +- add below configuration to `clickhouse` section at [`deploy/docker/clickhouse-setup/docker-compose.yaml`,](https://github.com/SigNoz/signoz/blob/develop/deploy/docker/clickhouse-setup/docker-compose.yaml) + ``` + ports: + - 9001:9000 + ``` +Screenshot 2022-07-14 at 22 50 37 + +- run `cd pkg/query-service/` to move to `query-service` directory, +- Then, you need to create a `.env` file with the following environment variable + ``` + SIGNOZ_LOCAL_DB_PATH="./signoz.db" + ``` +to set your local environment with the right `RELATIONAL_DATASOURCE_PATH` as mentioned in [`./constants/constants.go#L38`,](https://github.com/SigNoz/signoz/blob/develop/pkg/query-service/constants/constants.go#L38) + 
+- Now, install SigNoz locally **without** the `frontend` and `query-service`, + - If you are using `x86_64` processors (All Intel/AMD processors) run `sudo make run-x86` + - If you are on `arm64` processors (Apple M1 Macs) run `sudo make run-arm` + +#### Run locally, +``` +ClickHouseUrl=tcp://localhost:9001 STORAGE=clickhouse go run main.go +``` + +#### Build and Run locally +``` +cd pkg/query-service +go build -o build/query-service main.go +ClickHouseUrl=tcp://localhost:9001 STORAGE=clickhouse build/query-service +``` + +#### Docker Images +The docker images of query-service is available at https://hub.docker.com/r/signoz/query-service + +``` +docker pull signoz/query-service +``` + +``` +docker pull signoz/query-service:latest +``` + +``` +docker pull signoz/query-service:develop +``` + +### Important Note: +The Maintainers / Contributors who will change Line Numbers of `Frontend` & `Query-Section`, please update line numbers in [`/.scripts/commentLinesForSetup.sh`](https://github.com/SigNoz/signoz/blob/develop/.scripts/commentLinesForSetup.sh) + + + +**Query Service should now be available at** [`http://localhost:8080`](http://localhost:8080) + +If you want to see how the frontend plays with query service, you can run the frontend also in your local env with the baseURL changed to `http://localhost:8080` in file [`frontend/src/constants/env.ts`](https://github.com/SigNoz/signoz/blob/develop/frontend/src/constants/env.ts) as the `query-service` is now running at port `8080`. + + + + **[`^top^`](#)** + +
+ +# 5. Contribute to SigNoz Helm Chart 📊 + +**Need to Update: [https://github.com/SigNoz/charts](https://github.com/SigNoz/charts).** + +## 5.1 To run helm chart for local development + +- Clone the SigNoz repository and cd into charts directory, + ``` + git clone https://github.com/SigNoz/charts.git && cd charts + ``` +- It is recommended to use lightweight kubernetes (k8s) cluster for local development: + - [kind](https://kind.sigs.k8s.io/docs/user/quick-start/#installation) + - [k3d](https://k3d.io/#installation) + - [minikube](https://minikube.sigs.k8s.io/docs/start/) +- create a k8s cluster and make sure `kubectl` points to the locally created k8s cluster, +- run `make dev-install` to install SigNoz chart with `my-release` release name in `platform` namespace, +- next run, + ``` + kubectl -n platform port-forward svc/my-release-signoz-frontend 3301:3301 + ``` +to make SigNoz UI available at [localhost:3301](http://localhost:3301) + +**5.1.1 To install the HotROD sample app:** + +```bash +curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-install.sh \ + | HELM_RELEASE=my-release SIGNOZ_NAMESPACE=platform bash +``` + +**5.1.2 To load data with the HotROD sample app:** + +```bash +kubectl -n sample-application run strzal --image=djbingham/curl \ + --restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \ + 'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm +``` + +**5.1.3 To stop the load generation:** + +```bash +kubectl -n sample-application run strzal --image=djbingham/curl \ + --restart='OnFailure' -i --tty --rm --command -- curl \ + http://locust-master:8089/stop +``` + +**5.1.4 To delete the HotROD sample app:** + +```bash +curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-delete.sh \ + | HOTROD_NAMESPACE=sample-application bash +``` + + **[`^top^`](#)** + +--- + +## Other Ways to Contribute + +There are many other ways to get involved with the community and to participate in 
this project: + +- Use the product, submitting GitHub issues when a problem is found. +- Help code review pull requests and participate in issue threads. +- Submit a new feature request as an issue. +- Help answer questions on forums such as Stack Overflow and [SigNoz Community Slack Channel](https://signoz.io/slack). +- Tell others about the project on Twitter, your blog, etc. + + +Again, Feel free to ping us on [`#contributing`](https://signoz-community.slack.com/archives/C01LWQ8KS7M) or [`#contributing-frontend`](https://signoz-community.slack.com/archives/C027134DM8B) on our slack community if you need any help on this :) + +Thank You! diff --git a/signoz/LICENSE b/signoz/LICENSE new file mode 100644 index 0000000..2fef891 --- /dev/null +++ b/signoz/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2020-present SigNoz Inc. + +Portions of this software are licensed as follows: + +* All content that resides under the "ee/" directory of this repository, if that directory exists, is licensed under the license defined in "ee/LICENSE". +* All third party components incorporated into the SigNoz Software are licensed under the original license provided by the owner of the applicable component. +* Content outside of the above mentioned directories or restrictions above is available under the "MIT Expat" license as defined below. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/signoz/Makefile b/signoz/Makefile new file mode 100644 index 0000000..5f4a3c1 --- /dev/null +++ b/signoz/Makefile @@ -0,0 +1,191 @@ +# +# Reference Guide - https://www.gnu.org/software/make/manual/make.html +# + +# Build variables +BUILD_VERSION ?= $(shell git describe --always --tags) +BUILD_HASH ?= $(shell git rev-parse --short HEAD) +BUILD_TIME ?= $(shell date -u +"%Y-%m-%dT%H:%M:%SZ") +BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD) +DEV_LICENSE_SIGNOZ_IO ?= https://staging-license.signoz.io/api/v1 +DEV_BUILD ?= "" # set to any non-empty value to enable dev build + +# Internal variables or constants. 
+FRONTEND_DIRECTORY ?= frontend +QUERY_SERVICE_DIRECTORY ?= pkg/query-service +EE_QUERY_SERVICE_DIRECTORY ?= ee/query-service +STANDALONE_DIRECTORY ?= deploy/docker/clickhouse-setup +SWARM_DIRECTORY ?= deploy/docker-swarm/clickhouse-setup + +GOOS ?= $(shell go env GOOS) +GOARCH ?= $(shell go env GOARCH) +GOPATH ?= $(shell go env GOPATH) + +REPONAME ?= signoz +DOCKER_TAG ?= $(subst v,,$(BUILD_VERSION)) +FRONTEND_DOCKER_IMAGE ?= frontend +QUERY_SERVICE_DOCKER_IMAGE ?= query-service + +# Build-time Go variables +PACKAGE?=go.signoz.io/signoz +buildVersion=${PACKAGE}/pkg/query-service/version.buildVersion +buildHash=${PACKAGE}/pkg/query-service/version.buildHash +buildTime=${PACKAGE}/pkg/query-service/version.buildTime +gitBranch=${PACKAGE}/pkg/query-service/version.gitBranch +licenseSignozIo=${PACKAGE}/ee/query-service/constants.LicenseSignozIo + +LD_FLAGS=-X ${buildHash}=${BUILD_HASH} -X ${buildTime}=${BUILD_TIME} -X ${buildVersion}=${BUILD_VERSION} -X ${gitBranch}=${BUILD_BRANCH} +DEV_LD_FLAGS=-X ${licenseSignozIo}=${DEV_LICENSE_SIGNOZ_IO} + +all: build-push-frontend build-push-query-service + +# Steps to build static files of frontend +build-frontend-static: + @echo "------------------" + @echo "--> Building frontend static files" + @echo "------------------" + @cd $(FRONTEND_DIRECTORY) && \ + rm -rf build && \ + CI=1 yarn install && \ + yarn build && \ + ls -l build + +# Steps to build and push docker image of frontend +.PHONY: build-frontend-amd64 build-push-frontend +# Step to build docker image of frontend in amd64 (used in build pipeline) +build-frontend-amd64: build-frontend-static + @echo "------------------" + @echo "--> Building frontend docker image for amd64" + @echo "------------------" + @cd $(FRONTEND_DIRECTORY) && \ + docker build --file Dockerfile -t $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) \ + --build-arg TARGETPLATFORM="linux/amd64" . 
+ +# Step to build and push docker image of frontend(used in push pipeline) +build-push-frontend: build-frontend-static + @echo "------------------" + @echo "--> Building and pushing frontend docker image" + @echo "------------------" + @cd $(FRONTEND_DIRECTORY) && \ + docker buildx build --file Dockerfile --progress plain --push --platform linux/arm64,linux/amd64 \ + --tag $(REPONAME)/$(FRONTEND_DOCKER_IMAGE):$(DOCKER_TAG) . + +# Steps to build static binary of query service +.PHONY: build-query-service-static +build-query-service-static: + @echo "------------------" + @echo "--> Building query-service static binary" + @echo "------------------" + @if [ $(DEV_BUILD) != "" ]; then \ + cd $(QUERY_SERVICE_DIRECTORY) && \ + CGO_ENABLED=1 go build -tags timetzdata -a -o ./bin/query-service-${GOOS}-${GOARCH} \ + -ldflags "-linkmode external -extldflags '-static' -s -w ${LD_FLAGS} ${DEV_LD_FLAGS}"; \ + else \ + cd $(QUERY_SERVICE_DIRECTORY) && \ + CGO_ENABLED=1 go build -tags timetzdata -a -o ./bin/query-service-${GOOS}-${GOARCH} \ + -ldflags "-linkmode external -extldflags '-static' -s -w ${LD_FLAGS}"; \ + fi + +.PHONY: build-query-service-static-amd64 +build-query-service-static-amd64: + make GOARCH=amd64 build-query-service-static + +.PHONY: build-query-service-static-arm64 +build-query-service-static-arm64: + make CC=aarch64-linux-gnu-gcc GOARCH=arm64 build-query-service-static + +# Steps to build static binary of query service for all platforms +.PHONY: build-query-service-static-all +build-query-service-static-all: build-query-service-static-amd64 build-query-service-static-arm64 + +# Steps to build and push docker image of query service +.PHONY: build-query-service-amd64 build-push-query-service +# Step to build docker image of query service in amd64 (used in build pipeline) +build-query-service-amd64: build-query-service-static-amd64 + @echo "------------------" + @echo "--> Building query-service docker image for amd64" + @echo "------------------" + @docker 
build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile \ + --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) \ + --build-arg TARGETPLATFORM="linux/amd64" . + +# Step to build and push docker image of query in amd64 and arm64 (used in push pipeline) +build-push-query-service: build-query-service-static-all + @echo "------------------" + @echo "--> Building and pushing query-service docker image" + @echo "------------------" + @docker buildx build --file $(QUERY_SERVICE_DIRECTORY)/Dockerfile --progress plain \ + --push --platform linux/arm64,linux/amd64 \ + --tag $(REPONAME)/$(QUERY_SERVICE_DOCKER_IMAGE):$(DOCKER_TAG) . + +# Step to build EE docker image of query service in amd64 (used in build pipeline) +build-ee-query-service-amd64: + @echo "------------------" + @echo "--> Building query-service docker image for amd64" + @echo "------------------" + make QUERY_SERVICE_DIRECTORY=${EE_QUERY_SERVICE_DIRECTORY} build-query-service-amd64 + +# Step to build and push EE docker image of query in amd64 and arm64 (used in push pipeline) +build-push-ee-query-service: + @echo "------------------" + @echo "--> Building and pushing query-service docker image" + @echo "------------------" + make QUERY_SERVICE_DIRECTORY=${EE_QUERY_SERVICE_DIRECTORY} build-push-query-service + +dev-setup: + mkdir -p /var/lib/signoz + sqlite3 /var/lib/signoz/signoz.db "VACUUM"; + mkdir -p pkg/query-service/config/dashboards + @echo "------------------" + @echo "--> Local Setup completed" + @echo "------------------" + +run-local: + @docker-compose -f \ + $(STANDALONE_DIRECTORY)/docker-compose-core.yaml -f $(STANDALONE_DIRECTORY)/docker-compose-local.yaml \ + up --build -d + +down-local: + @docker-compose -f \ + $(STANDALONE_DIRECTORY)/docker-compose-core.yaml -f $(STANDALONE_DIRECTORY)/docker-compose-local.yaml \ + down -v + +pull-signoz: + @docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml pull + +run-signoz: + @docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up 
--build -d + +run-testing: + @docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.testing.yaml up --build -d + +down-signoz: + @docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v + +clear-standalone-data: + @docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \ + sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*" + +clear-swarm-data: + @docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \ + sh -c "cd /pwd && rm -rf alertmanager/* clickhouse*/* signoz/* zookeeper-*/*" + +clear-standalone-ch: + @docker run --rm -v "$(PWD)/$(STANDALONE_DIRECTORY)/data:/pwd" busybox \ + sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*" + +clear-swarm-ch: + @docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \ + sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*" + +test: + go test ./pkg/query-service/app/metrics/... + go test ./pkg/query-service/cache/... + go test ./pkg/query-service/app/... + go test ./pkg/query-service/app/querier/... + go test ./pkg/query-service/converter/... + go test ./pkg/query-service/formatter/... + go test ./pkg/query-service/tests/integration/... + go test ./pkg/query-service/rules/... + go test ./pkg/query-service/collectorsimulator/... + go test ./pkg/query-service/postprocess/... diff --git a/signoz/README.de-de.md b/signoz/README.de-de.md new file mode 100644 index 0000000..1fdbcbd --- /dev/null +++ b/signoz/README.de-de.md @@ -0,0 +1,216 @@ +

+ SigNoz-logo + +

Überwache deine Anwendungen und behebe Probleme in deinen bereitgestellten Anwendungen. SigNoz ist eine Open Source Alternative zu DataDog, New Relic, etc.

+

+ +

+ Downloads + GitHub issues + + tweet +

+ +

+ Dokumentation • + Readme auf Englisch • + ReadMe auf Chinesisch • + ReadMe auf Portugiesisch • + Slack Community • + Twitter +

+ +## + +SigNoz hilft Entwicklern, Anwendungen zu überwachen und Probleme in ihren bereitgestellten Anwendungen zu beheben. Mit SigNoz können Sie Folgendes tun: + +👉 Visualisieren Sie Metriken, Traces und Logs in einer einzigen Oberfläche. + +👉 Sie können Metriken wie die p99-Latenz, Fehlerquoten für Ihre Dienste, externe API-Aufrufe und individuelle Endpunkte anzeigen. + +👉 Sie können die Ursache des Problems ermitteln, indem Sie zu den genauen Traces gehen, die das Problem verursachen, und detaillierte Flammenbilder einzelner Anfragetraces anzeigen. + +👉 Führen Sie Aggregationen auf Trace-Daten durch, um geschäftsrelevante Metriken zu erhalten. + +👉 Filtern und Abfragen von Logs, Erstellen von Dashboards und Benachrichtigungen basierend auf Attributen in den Logs. + +👉 Automatische Aufzeichnung von Ausnahmen in Python, Java, Ruby und Javascript. + +👉 Einfache Einrichtung von Benachrichtigungen mit dem selbst erstellbaren Abfrage-Builder. + +## + +### Anwendung Metriken + +![application_metrics](https://user-images.githubusercontent.com/83692067/226637410-900dbc5e-6705-4b11-a10c-bd0faeb2a92f.png) + +### Verteiltes Tracing + +distributed_tracing_2 2 + +distributed_tracing_1 + +### Log Verwaltung + +logs_management + +### Infrastruktur Überwachung + +infrastructure_monitoring + +### Exceptions Monitoring + +![exceptions_light](https://user-images.githubusercontent.com/83692067/226637967-4188d024-3ac9-4799-be95-f5ea9c45436f.png) + +### Alarme + +alerts_management + +

+ +## Werde Teil unserer Slack Community + +Sag Hi zu uns auf [Slack](https://signoz.io/slack) 👋 + +

+ +## Funktionen: + +- Einheitliche Benutzeroberfläche für Metriken, Traces und Logs. Keine Notwendigkeit, zwischen Prometheus und Jaeger zu wechseln, um Probleme zu debuggen oder ein separates Log-Tool wie Elastic neben Ihrer Metriken- und Traces-Stack zu verwenden. +- Überblick über Anwendungsmetriken wie RPS, Latenzzeiten des 50tes/90tes/99tes Perzentils und Fehlerquoten. +- Langsamste Endpunkte in Ihrer Anwendung. +- Zeigen Sie genaue Anfragetraces an, um Probleme in nachgelagerten Diensten, langsamen Datenbankabfragen oder Aufrufen von Drittanbieterdiensten wie Zahlungsgateways zu identifizieren. +- Filtern Sie Traces nach Dienstname, Operation, Latenz, Fehler, Tags/Annotationen. +- Führen Sie Aggregationen auf Trace-Daten (Ereignisse/Spans) durch, um geschäftsrelevante Metriken zu erhalten. Beispielsweise können Sie die Fehlerquote und die 99tes Perzentillatenz für `customer_type: gold` oder `deployment_version: v2` oder `external_call: paypal` erhalten. +- Native Unterstützung für OpenTelemetry-Logs, erweiterten Log-Abfrage-Builder und automatische Log-Sammlung aus dem Kubernetes-Cluster. +- Blitzschnelle Log-Analytik ([Logs Perf. Benchmark](https://signoz.io/blog/logs-performance-benchmark/)) +- End-to-End-Sichtbarkeit der Infrastrukturleistung, Aufnahme von Metriken aus allen Arten von Host-Umgebungen. +- Einfache Einrichtung von Benachrichtigungen mit dem selbst erstellbaren Abfrage-Builder. + +

+ +## Wieso SigNoz? + +Als Entwickler fanden wir es anstrengend, uns für jede kleine Funktion, die wir haben wollten, auf Closed Source SaaS Anbieter verlassen zu müssen. Closed Source Anbieter überraschen ihre Kunden zum Monatsende oft mit hohen Rechnungen, die keine Transparenz bzgl. der Kostenaufteilung bieten. + +Wir wollten eine selbst gehostete, Open Source Variante von Lösungen wie DataDog, NewRelic für Firmen anbieten, die Datenschutz und Sicherheitsbedenken haben, bei der Weitergabe von Kundendaten an Drittanbieter. + +Open Source gibt dir außerdem die totale Kontrolle über deine Konfiguration, Stichprobenentnahme und Betriebszeit. Du kannst des Weiteren neue Module auf Basis von SigNoz bauen, die erweiterte, geschäftsspezifische Funktionen anbieten. + +### Languages supported: + +Wir unterstützen [OpenTelemetry](https://opentelemetry.io) als Bibliothek, mit der Sie Ihre Anwendungen instrumentieren können. Daher wird jedes von OpenTelemetry unterstützte Framework und jede Sprache auch von SigNoz unterstützt. Einige der wichtigsten unterstützten Sprachen sind: + +- Java +- Python +- NodeJS +- Go +- PHP +- .NET +- Ruby +- Elixir +- Rust + +Hier findest du die vollständige Liste von unterstützten Programmiersprachen - https://opentelemetry.io/docs/ +

+ +## Erste Schritte mit SigNoz + +### Bereitstellung mit Docker + +Bitte folge den [hier](https://signoz.io/docs/install/docker/) aufgelisteten Schritten um deine Anwendung mit Docker bereitzustellen. + +Die [Anleitungen zur Fehlerbehebung](https://signoz.io/docs/install/troubleshooting/) könnten hilfreich sein, falls du auf irgendwelche Schwierigkeiten stößt. + +

 

+ +### Deploy in Kubernetes using Helm + +Bitte folge den [hier](https://signoz.io/docs/deployment/helm_chart) aufgelisteten Schritten, um deine Anwendung mit Helm Charts bereitzustellen. + +

+ +## Vergleiche mit bekannten Tools + +### SigNoz vs Prometheus + +Prometheus ist gut, falls du dich nur für Metriken interessierst. Wenn du eine nahtlose Integration von Metriken und Einzelschritt-Fehlersuchen haben möchtest, ist die Kombination aus Prometheus und Jaeger nicht das Richtige für dich. + +Unser Ziel ist es, eine integrierte Benutzeroberfläche aus Metriken und Einzelschritt-Fehlersuchen anzubieten, ähnlich wie es SaaS Anbieter wie Datadog tun, mit der Möglichkeit von erweitertem filtern und aggregieren von Fehlersuchen. Etwas, was in Jaeger aktuell fehlt. + +

 

+ +### SigNoz vs Jaeger + +Jaeger kümmert sich nur um verteilte Einzelschritt-Fehlersuche. SigNoz erstellt sowohl Metriken als auch Einzelschritt-Fehlersuche, daneben haben wir auch Protokoll Verwaltung auf unserem Plan. + +Außerdem hat SigNoz noch mehr spezielle Funktionen im Vergleich zu Jaeger: + +- Jaeger UI zeigt keine Metriken für Einzelschritt-Fehlersuchen oder für gefilterte Einzelschritt-Fehlersuchen an. +- Jaeger erstellt keine Aggregate für gefilterte Einzelschritt-Fehlersuchen, z. B. die P99 Latenz von Abfragen mit dem Tag `customer_type=premium`, was hingegen mit SigNoz leicht umsetzbar ist. + +

 

+ +### SigNoz vs Elastic + +- Die Verwaltung von SigNoz-Protokollen basiert auf 'ClickHouse', einem spaltenbasierten OLAP-Datenspeicher, der aggregierte Protokollanalyseabfragen wesentlich effizienter macht. +- 50 % geringerer Ressourcenbedarf im Vergleich zu Elastic während der Aufnahme. + +Wir haben Benchmarks veröffentlicht, die Elastic mit SigNoz vergleichen. Schauen Sie es sich [hier](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) an. +

 

+ +### SigNoz vs Loki + +- SigNoz unterstützt Aggregationen von Daten mit hoher Kardinalität über ein großes Volumen, Loki hingegen nicht. +- SigNoz unterstützt Indizes über Daten mit hoher Kardinalität und hat keine Beschränkungen hinsichtlich der Anzahl der Indizes, während Loki maximale Streams erreicht, wenn ein paar Indizes hinzugefügt werden. +- Das Durchsuchen großer Datenmengen ist in Loki im Vergleich zu SigNoz schwierig und langsam. + +Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen Sie es sich [hier](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) an. +

+ +## Zum Projekt beitragen + +Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md), durch, bevor du anfängst, Beiträge zu SigNoz zu machen. +Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #contributing Kanal in unserer [slack community](https://signoz.io/slack) + +### Unsere Projektbetreuer + +#### Backend + +- [Ankit Nayan](https://github.com/ankitnayan) +- [Nityananda Gohain](https://github.com/nityanandagohain) +- [Srikanth Chekuri](https://github.com/srikanthccv) +- [Vishal Sharma](https://github.com/makeavish) + +#### Frontend + +- [Palash Gupta](https://github.com/palashgdev) +- [Yunus M](https://github.com/YounixM) +- [Rajat Dabade](https://github.com/Rajat-Dabade) + +#### DevOps + +- [Prashant Shahi](https://github.com/prashant-shahi) + +

+ +## Dokumentation + +Du findest unsere Dokumentation unter https://signoz.io/docs/. Falls etwas unverständlich ist oder fehlt, öffne gerne ein Github Issue mit dem Label `documentation` oder schreib uns über den Community Slack Channel. + +

+ +## Gemeinschaft + +Werde Teil der [slack community](https://signoz.io/slack) um mehr über verteilte Einzelschritt-Fehlersuche, Messung von Systemzuständen oder SigNoz zu erfahren und sich mit anderen Nutzern und Mitwirkenden in Verbindung zu setzen. + +Falls du irgendwelche Ideen, Fragen oder Feedback hast, kannst du sie gerne über unsere [Github Discussions](https://github.com/SigNoz/signoz/discussions) mit uns teilen. + +Wie immer, Dank an unsere großartigen Mitwirkenden! + + + + diff --git a/signoz/README.md b/signoz/README.md new file mode 100644 index 0000000..852b44f --- /dev/null +++ b/signoz/README.md @@ -0,0 +1,230 @@ +

+ SigNoz-logo + +

Monitor your applications and troubleshoot problems in your deployed applications, an open-source alternative to DataDog, New Relic, etc.

+

+ +

+ Downloads + GitHub issues + + tweet +

+ + +

+ Documentation • + ReadMe in Chinese • + ReadMe in German • + ReadMe in Portuguese • + Slack Community • + Twitter +

+ +## + +SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. With SigNoz, you can: + +👉 Visualise Metrics, Traces and Logs in a single pane of glass + +👉 You can see metrics like p99 latency, error rates for your services, external API calls and individual end points. + +👉 You can find the root cause of the problem by going to the exact traces which are causing the problem and see detailed flamegraphs of individual request traces. + +👉 Run aggregates on trace data to get business relevant metrics + +👉 Filter and query logs, build dashboards and alerts based on attributes in logs + +👉 Record exceptions automatically in Python, Java, Ruby, and Javascript + +👉 Easy to set alerts with DIY query builder + + +### Application Metrics + +![application_metrics](https://user-images.githubusercontent.com/83692067/226637410-900dbc5e-6705-4b11-a10c-bd0faeb2a92f.png) + + +### Distributed Tracing +distributed_tracing_2 2 + +distributed_tracing_1 + +### Logs Management + +logs_management + +### Infrastructure Monitoring + +infrastructure_monitoring + +### Exceptions Monitoring + +![exceptions_light](https://user-images.githubusercontent.com/83692067/226637967-4188d024-3ac9-4799-be95-f5ea9c45436f.png) + + +### Alerts + +alerts_management + + +

+ + +## Join our Slack community + +Come say Hi to us on [Slack](https://signoz.io/slack) 👋 + +

+ + +## Features: + +- Unified UI for metrics, traces and logs. No need to switch from Prometheus to Jaeger to debug issues, or use a logs tool like Elastic separate from your metrics and traces stack. +- Application overview metrics like RPS, 50th/90th/99th Percentile latencies, and Error Rate +- Slowest endpoints in your application +- See exact request trace to figure out issues in downstream services, slow DB queries, call to 3rd party services like payment gateways, etc +- Filter traces by service name, operation, latency, error, tags/annotations. +- Run aggregates on trace data (events/spans) to get business relevant metrics. e.g. You can get error rate and 99th percentile latency of `customer_type: gold` or `deployment_version: v2` or `external_call: paypal` +- Native support for OpenTelemetry Logs, advanced log query builder, and automatic log collection from k8s cluster +- Lightning quick log analytics ([Logs Perf. Benchmark](https://signoz.io/blog/logs-performance-benchmark/)) +- End-to-End visibility into infrastructure performance, ingest metrics from all kinds of host environments +- Easy to set alerts with DIY query builder + +

+ + +## Why SigNoz? + +Being developers, we found it annoying to rely on closed source SaaS vendors for every small feature we wanted. Closed source vendors often surprise you with huge month end bills without any transparency. + +We wanted to make a self-hosted & open source version of tools like DataDog, NewRelic for companies that have privacy and security concerns about having customer data going to third party services. + +Being open source also gives you complete control of your configuration, sampling, uptimes. You can also build modules over SigNoz to extend business specific capabilities + +### Languages supported: + +We support [OpenTelemetry](https://opentelemetry.io) as the library which you can use to instrument your applications. So any framework and language supported by OpenTelemetry is also supported by SigNoz. Some of the main supported languages are: + +- Java +- Python +- Node.js +- Go +- PHP +- .NET +- Ruby +- Elixir +- Rust + + +You can find the complete list of languages here - https://opentelemetry.io/docs/ + +

+ + +## Getting Started + +### Deploy using Docker + +Please follow the steps listed [here](https://signoz.io/docs/install/docker/) to install using docker + +The [troubleshooting instructions](https://signoz.io/docs/install/troubleshooting/) may be helpful if you face any issues. + +

 

+ + +### Deploy in Kubernetes using Helm + +Please follow the steps listed [here](https://signoz.io/docs/deployment/helm_chart) to install using helm charts + +

+ +## Comparisons to Familiar Tools + +### SigNoz vs Prometheus + +Prometheus is good if you want to do just metrics. But if you want to have a seamless experience between metrics and traces, then current experience of stitching together Prometheus & Jaeger is not great. + +Our goal is to provide an integrated UI between metrics & traces - similar to what SaaS vendors like Datadog provide - and give advanced filtering and aggregation over traces, something which Jaeger currently lacks. +

 

+ +### SigNoz vs Jaeger + +Jaeger only does distributed tracing. SigNoz supports metrics, traces and logs - all the 3 pillars of observability. + +Moreover, SigNoz has a few more advanced features wrt Jaeger: + +- Jaeger UI doesn’t show any metrics on traces or on filtered traces +- Jaeger can’t get aggregates on filtered traces. For example, p99 latency of requests which have tag - customer_type='premium'. This can be done easily on SigNoz +

 

+ +### SigNoz vs Elastic + +- SigNoz Logs management are based on ClickHouse, a columnar OLAP datastore which makes aggregate log analytics queries much more efficient +- 50% lower resource requirement compared to Elastic during ingestion + +We have published benchmarks comparing Elastic with SigNoz. Check it out [here](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) + +

 

+ +### SigNoz vs Loki + +- SigNoz supports aggregations on high-cardinality data over a huge volume while Loki doesn’t. +- SigNoz supports indexes over high cardinality data and has no limitations on the number of indexes, while Loki reaches max streams with a few indexes added to it. +- Searching over a huge volume of data is difficult and slow in Loki compared to SigNoz + +We have published benchmarks comparing Loki with SigNoz. Check it out [here](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) +

+ + +## Contributing + +We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) to get started with making contributions to SigNoz. + +Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack) + +### Project maintainers + +#### Backend + +- [Ankit Nayan](https://github.com/ankitnayan) +- [Nityananda Gohain](https://github.com/nityanandagohain) +- [Srikanth Chekuri](https://github.com/srikanthccv) +- [Vishal Sharma](https://github.com/makeavish) + +#### Frontend + +- [Yunus M](https://github.com/YounixM) +- [Vikrant Gupta](https://github.com/vikrantgupta25) +- [Sagar Rajput](https://github.com/SagarRajput-7) + +#### DevOps + +- [Prashant Shahi](https://github.com/prashant-shahi) +- [Vibhu Pandey](https://github.com/grandwizard28) + +

+ + +## Documentation + +You can find docs at https://signoz.io/docs/. If you need any clarification or find something missing, feel free to raise a GitHub issue with the label `documentation` or reach out to us at the community slack channel. + +

+ + +## Community + +Join the [slack community](https://signoz.io/slack) to know more about distributed tracing, observability, or SigNoz and to connect with other users and contributors. + +If you have any ideas, questions, or any feedback, please share on our [Github Discussions](https://github.com/SigNoz/signoz/discussions) + +As always, thanks to our amazing contributors! + + + + diff --git a/signoz/README.pt-br.md b/signoz/README.pt-br.md new file mode 100644 index 0000000..c817e8a --- /dev/null +++ b/signoz/README.pt-br.md @@ -0,0 +1,158 @@ +

+ SigNoz-logo + +

Monitore seus aplicativos e solucione problemas em seus aplicativos implantados, uma alternativa de código aberto para soluções como DataDog, New Relic, entre outras.

+

+ +

+ Downloads + GitHub issues + + tweet +

+ + +

+ Documentação • + Comunidade no Slack • + Twitter +

+ +## + +SigNoz auxilia os desenvolvedores a monitorarem aplicativos e solucionar problemas em seus aplicativos implantados. SigNoz usa rastreamento distribuído para obter visibilidade em sua pilha de software. + +👉 Você pode verificar métricas como latência p99, taxas de erro em seus serviços, requisições às APIs externas e endpoints individuais. + +👉 Você pode encontrar a causa raiz do problema acessando os rastreamentos exatos que estão causando o problema e verificar os quadros detalhados de cada requisição individual. + +👉 Execute agregações em dados de rastreamento para obter métricas de negócios relevantes. + + +![SigNoz Feature](https://signoz-public.s3.us-east-2.amazonaws.com/signoz_hero_github.png) + +

+ + + +## Junte-se à nossa comunidade no Slack + +Venha dizer oi para nós no [Slack](https://signoz.io/slack) 👋 + +

+ + + +## Funções: + +- Métricas de visão geral do aplicativo, como RPS, latências de percentual 50/90/99 e taxa de erro +- Endpoints mais lentos em seu aplicativo +- Visualize o rastreamento preciso de requisições de rede para descobrir problemas em serviços downstream, consultas lentas de banco de dados, chamadas para serviços de terceiros, como gateways de pagamento, etc. +- Filtre os rastreamentos por nome de serviço, operação, latência, erro, tags / anotações. +- Execute agregações em dados de rastreamento (eventos / extensões) para obter métricas de negócios relevantes, como por exemplo, você pode obter a taxa de erro e a latência do 99º percentil de `customer_type: gold` or `deployment_version: v2` or `external_call: paypal` +- Interface de Usuário unificada para métricas e rastreios. Não há necessidade de mudar de Prometheus para Jaeger para depurar problemas. + +

+ + + +## Por que escolher SigNoz? + +Sendo desenvolvedores, achamos irritante contar com fornecedores de SaaS de código fechado para cada pequeno recurso que queríamos. Fornecedores de código fechado costumam surpreendê-lo com enormes contas no final do mês de uso sem qualquer transparência . + +Queríamos fazer uma versão auto-hospedada e de código aberto de ferramentas como DataDog, NewRelic para empresas que têm preocupações com privacidade e segurança em ter dados de clientes indo para serviços de terceiros. + +Ser open source também oferece controle completo de sua configuração, amostragem e tempos de atividade. Você também pode construir módulos sobre o SigNoz para estender recursos específicos do negócio. + +### Linguagens Suportadas: + +Nós apoiamos a biblioteca [OpenTelemetry](https://opentelemetry.io) como a biblioteca que você pode usar para instrumentar seus aplicativos. Em outras palavras, SigNoz oferece suporte a qualquer framework e linguagem que suporte a biblioteca OpenTelemetry. As principais linguagens suportadas incluem: + +- Java +- Python +- NodeJS +- Go + +Você pode encontrar a lista completa de linguagens aqui - https://opentelemetry.io/docs/ + +

+ + + +## Iniciando + + +### Implantar usando Docker + +Siga as etapas listadas [aqui](https://signoz.io/docs/install/docker/) para instalar usando o Docker. + +Esse [guia para solução de problemas](https://signoz.io/docs/install/troubleshooting/) pode ser útil se você enfrentar quaisquer problemas. + +

 

+ + +### Implentar no Kubernetes usando Helm + +Siga as etapas listadas [aqui](https://signoz.io/docs/deployment/helm_chart) para instalar usando helm charts. + + +

+ + + +## Comparações com ferramentas similares + +### SigNoz ou Prometheus + +Prometheus é bom se você quiser apenas fazer métricas. Mas se você quiser ter uma experiência perfeita entre métricas e rastreamentos, a experiência atual de unir Prometheus e Jaeger não é ótima. + +Nosso objetivo é fornecer uma interface do usuário integrada entre métricas e rastreamentos - semelhante ao que fornecedores de SaaS como o Datadog fornecem - e fornecer filtragem e agregação avançada sobre rastreamentos, algo que a Jaeger atualmente carece. + +

 

+ +### SigNoz ou Jaeger + +Jaeger só faz rastreamento distribuído. SigNoz faz métricas e rastreia, e também temos gerenciamento de log em nossos planos. + +Além disso, SigNoz tem alguns recursos mais avançados do que Jaeger: + +- A interface de usuário do Jaeger não mostra nenhuma métrica em traces ou em traces filtrados +- Jaeger não pode obter agregados em rastros filtrados. Por exemplo, latência p99 de solicitações que possuem tag - customer_type='premium'. Isso pode ser feito facilmente com SigNoz. +

+ + + +## Contribuindo + + +Nós ❤️ contribuições grandes ou pequenas. Leia [CONTRIBUTING.md](CONTRIBUTING.md) para começar a fazer contribuições para o SigNoz. + +Não sabe como começar? Basta enviar um sinal para nós no canal `#contributing` em nossa [comunidade no Slack.](https://signoz.io/slack) + +

+ + + +## Documentação + +Você pode encontrar a documentação em https://signoz.io/docs/. Se você tiver alguma dúvida ou sentir falta de algo, sinta-se à vontade para criar uma issue com a tag `documentation` no GitHub ou entre em contato conosco no canal da comunidade no Slack. + +

+ + + +## Comunidade + +Junte-se a [comunidade no Slack](https://signoz.io/slack) para saber mais sobre rastreamento distribuído, observabilidade ou SigNoz e para se conectar com outros usuários e colaboradores. + +Se você tiver alguma ideia, pergunta ou feedback, compartilhe em nosso [Github Discussões](https://github.com/SigNoz/signoz/discussions) + +Como sempre, obrigado aos nossos incríveis colaboradores! + + + + + + + diff --git a/signoz/README.zh-cn.md b/signoz/README.zh-cn.md new file mode 100644 index 0000000..445474f --- /dev/null +++ b/signoz/README.zh-cn.md @@ -0,0 +1,227 @@ +SigNoz-logo + +

监控你的应用,并且可排查已部署应用的问题,这是一个可替代 DataDog、NewRelic 的开源方案

+

+ +

+ Downloads + GitHub issues + + tweet +

+ +

+ 文档 • + 中文ReadMe • + 德文ReadMe • + 葡萄牙语ReadMe • + Slack 社区 • + Twitter +

+ +## + +SigNoz 帮助开发人员监控应用并排查已部署应用的问题。你可以使用 SigNoz 实现如下能力: + +👉 在同一块面板上,可视化 Metrics, Traces 和 Logs 内容。 + +👉 你可以关注服务的 p99 延迟和错误率, 包括外部 API 调用和个别的端点。 + +👉 你可以找到问题的根因,通过提取相关问题的 traces 日志、单独查看请求 traces 的火焰图详情。 + +👉 执行 trace 数据聚合,以获取业务相关的 metrics + +👉 对日志过滤和查询,通过日志的属性建立看板和告警 + +👉 通过 Python,java,Ruby 和 Javascript 自动记录异常 + +👉 轻松的自定义查询和设置告警 + +### 应用 Metrics 展示 + +![application_metrics](https://user-images.githubusercontent.com/83692067/226637410-900dbc5e-6705-4b11-a10c-bd0faeb2a92f.png) + +### 分布式追踪 + +distributed_tracing_2 2 + +distributed_tracing_1 + +### 日志管理 + +logs_management + +### 基础设施监控 + +infrastructure_monitoring + +### 异常监控 + +![exceptions_light](https://user-images.githubusercontent.com/83692067/226637967-4188d024-3ac9-4799-be95-f5ea9c45436f.png) + +### 告警 + +alerts_management + +

+ +## 加入我们 Slack 社区 + +来 [Slack](https://signoz.io/slack) 和我们打招呼吧 👋 + +

+ +## 特性: + +- 为 metrics, traces and logs 制定统一的 UI。 无需切换 Prometheus 到 Jaeger 去查找问题,也无需使用像 Elastic 这样的日志工具分开你的 metrics 和 traces + +- 默认统计应用的 metrics 数据,像 RPS (每秒请求数), 50th/90th/99th 的分位数延迟数据,还有相关的错误率 + +- 找到应用中最慢的端点 + +- 查看准确的请求跟踪数据,找到下游服务的问题了,比如 DB 慢查询,或者调用第三方的支付网关等 + +- 通过 服务名、操作方式、延迟、错误、标签/注释 过滤 traces 数据 + +- 通过聚合 trace 数据而获得业务相关的 metrics。 比如你可以通过 `customer_type: gold` 或者 `deployment_version: v2` 或者 `external_call: paypal` 获取错误率和 P99 延迟数据 + +- 原生支持 OpenTelemetry 日志,高级日志查询,自动收集 k8s 相关日志 + +- 快如闪电的日志分析 ([Logs Perf. Benchmark](https://signoz.io/blog/logs-performance-benchmark/)) + +- 可视化点到点的基础设施性能,提取所有类型机器的 metrics 数据 + +- 轻易自定义告警查询 +

+ +## 为什么使用 SigNoz? + +作为开发者, 我们发现 SaaS 厂商对一些大家想要的小功能都是闭源的,这种行为真的让人有点恼火。 闭源厂商还会在月底给你一张没有明细的巨额账单。 + +我们想做一个自托管并且可开源的工具,像 DataDog 和 NewRelic 那样, 为那些担心数据隐私和安全的公司提供第三方服务。 + +作为开源的项目,你完全可以自己掌控你的配置、样本和更新。你同样可以基于 SigNoz 拓展特定的业务模块。 + +### 支持的编程语言: + +我们支持 [OpenTelemetry](https://opentelemetry.io)。作为一个观测你应用的库文件。所以任何 OpenTelemetry 支持的框架和语言,对于 SigNoz 也同样支持。 一些主要支持的语言如下: + +- Java +- Python +- NodeJS +- Go +- PHP +- .NET +- Ruby +- Elixir +- Rust + +你可以在这里找到全部支持的语言列表 - https://opentelemetry.io/docs/ + +

+ +## 让我们开始吧 + +### 使用 Docker 部署 + +请一步步跟随 [这里](https://signoz.io/docs/install/docker/) 通过 docker 来安装。 + +这个 [排障说明书](https://signoz.io/docs/install/troubleshooting/) 可以帮助你解决碰到的问题。 + +

 

+ +### 使用 Helm 在 Kubernetes 部署 + +请一步步跟随 [这里](https://signoz.io/docs/deployment/helm_chart) 通过 helm 来安装 + +

+ +## 比较相似的工具 + +### SigNoz vs Prometheus + +Prometheus 是一个针对 metrics 监控的强大工具。但是如果你想无缝的切换 metrics 和 traces 查询,你当前大概率需要在 Prometheus 和 Jaeger 之间切换。 + +我们的目标是提供一个客户观测 metrics 和 traces 整合的 UI。就像 SaaS 供应商 DataDog,它提供很多 jaeger 缺失的功能,比如针对 traces 过滤功能和聚合功能。 + +

 

+ +### SigNoz vs Jaeger + +Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metrics, traces 和 logs 所有的观测。 + +而且, SigNoz 相较于 Jaeger 拥有更多的高级功能: + +- Jaeger UI 不能提供任何基于 traces 的 metrics 查询和过滤。 + +- Jaeger 不能针对过滤的 traces 做聚合。 比如, p99 延迟的请求有个标签是 customer_type='premium'。 而这些在 SigNoz 可以轻松做到。 +

 

+ +### SigNoz vs Elastic + +- SigNoz 的日志管理是基于 ClickHouse 实现的,可以使日志的聚合更加高效,因为它是基于 OLAP 的数据仓储。 + +- 与 Elastic 相比,可以节省 50% 的资源成本 + +我们已经公布了 Elastic 和 SigNoz 的性能对比。 请点击 [这里](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) + +

 

+ +### SigNoz vs Loki + +- SigNoz 支持大容量高基数的聚合,但是 loki 是不支持的。 + +- SigNoz 支持索引的高基数查询,并且对索引没有数量限制,而 Loki 会在添加部分索引后到达最大上限。 + +- 相较于 SigNoz,Loki 在搜索大量数据下既困难又缓慢。 + +我们已经发布了基准测试对比 Loki 和 SigNoz 性能。请点击 [这里](https://signoz.io/blog/logs-performance-benchmark/?utm_source=github-readme&utm_medium=logs-benchmark) + +

+ +## 贡献 + +我们 ❤️ 你的贡献,无论大小。 请先阅读 [CONTRIBUTING.md](CONTRIBUTING.md) 再开始给 SigNoz 做贡献。 + +如果你不知道如何开始? 只需要在 [slack 社区](https://signoz.io/slack) 通过 `#contributing` 频道联系我们。 + +### 项目维护人员 + +#### 后端 + +- [Ankit Nayan](https://github.com/ankitnayan) +- [Nityananda Gohain](https://github.com/nityanandagohain) +- [Srikanth Chekuri](https://github.com/srikanthccv) +- [Vishal Sharma](https://github.com/makeavish) + +#### 前端 + +- [Palash Gupta](https://github.com/palashgdev) +- [Yunus M](https://github.com/YounixM) +- [Rajat Dabade](https://github.com/Rajat-Dabade) + +#### 运维开发 + +- [Prashant Shahi](https://github.com/prashant-shahi) + +

+ +## 文档 + +你可以通过 https://signoz.io/docs/ 找到相关文档。如果你需要阐述问题或者发现一些缺失的内容, 通过标签为 `documentation` 提交 Github 问题。或者通过 slack 社区频道。 +

+ +## 社区 + +加入 [slack 社区](https://signoz.io/slack) 去了解更多关于分布式追踪、可观测性系统 。或者与 SigNoz 其他用户和贡献者交流。 + +如果你有任何想法、问题、或者任何反馈, 请通过 [Github Discussions](https://github.com/SigNoz/signoz/discussions) 分享。 + +不管怎么样,感谢这个项目的所有贡献者! + + + + diff --git a/signoz/SECURITY.md b/signoz/SECURITY.md new file mode 100644 index 0000000..000076f --- /dev/null +++ b/signoz/SECURITY.md @@ -0,0 +1,18 @@ +# Security Policy + +SigNoz is looking forward to working with security researchers across the world to keep SigNoz and our users safe. If you have found an issue in our systems/applications, please reach out to us. + +## Supported Versions +We always recommend using the latest version of SigNoz to ensure you get all security updates + +## Reporting a Vulnerability + +If you believe you have found a security vulnerability within SigNoz, please let us know right away. We'll try and fix the problem as soon as possible. + +**Do not report vulnerabilities using public GitHub issues**. Instead, email with a detailed account of the issue. Please submit one issue per email, this helps us triage vulnerabilities. + +Once we've received your email we'll keep you updated as we fix the vulnerability. + +## Thanks + +Thank you for keeping SigNoz and our users safe. 🙇 diff --git a/signoz/deploy/README.md b/signoz/deploy/README.md new file mode 100644 index 0000000..5e6740e --- /dev/null +++ b/signoz/deploy/README.md @@ -0,0 +1,82 @@ +# Deploy + +Check that you have cloned [signoz/signoz](https://github.com/signoz/signoz) +and currently are in `signoz/deploy` folder. + +## Docker + +If you don't have docker set up, please follow [this guide](https://docs.docker.com/engine/install/) +to set up docker before proceeding with the next steps. 
+ +### Using Install Script + +Now run the following command to install: + +```sh +./install.sh +``` + +### Using Docker Compose + +If you don't have docker-compose set up, please follow [this guide](https://docs.docker.com/compose/install/) +to set up docker compose before proceeding with the next steps. + +For x86 chip (amd): + +```sh +docker-compose -f docker/clickhouse-setup/docker-compose.yaml up -d +``` + +Open http://localhost:3301 in your favourite browser. In couple of minutes, you should see +the data generated from hotrod in SigNoz UI. + +## Kubernetes + +### Using Helm + +#### Bring up SigNoz cluster + +```sh +helm repo add signoz https://charts.signoz.io + +kubectl create ns platform + +helm -n platform install my-release signoz/signoz +``` + +To access the UI, you can `port-forward` the frontend service: + +```sh +kubectl -n platform port-forward svc/my-release-frontend 3301:3301 +``` + +Open http://localhost:3301 in your favourite browser. Few minutes after you generate load +from the HotROD application, you should see the data generated from hotrod in SigNoz UI. + +#### Test HotROD application with SigNoz + +```sh +kubectl create ns sample-application + +kubectl -n sample-application apply -f https://raw.githubusercontent.com/SigNoz/signoz/develop/sample-apps/hotrod/hotrod.yaml +``` + +To generate load: + +```sh +kubectl -n sample-application run strzal --image=djbingham/curl \ +--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \ +'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm +``` + +To stop load: + +```sh +kubectl -n sample-application run strzal --image=djbingham/curl \ + --restart='OnFailure' -i --tty --rm --command -- curl \ + http://locust-master:8089/stop +``` + +## Uninstall/Troubleshoot? + +Go to our official documentation site [signoz.io/docs](https://signoz.io/docs) for more. 
diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/alertmanager.yml b/signoz/deploy/docker-swarm/clickhouse-setup/alertmanager.yml new file mode 100644 index 0000000..d69357f --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/alertmanager.yml @@ -0,0 +1,35 @@ +global: + resolve_timeout: 1m + slack_api_url: 'https://hooks.slack.com/services/xxx' + +route: + receiver: 'slack-notifications' + +receivers: +- name: 'slack-notifications' + slack_configs: + - channel: '#alerts' + send_resolved: true + icon_url: https://avatars3.githubusercontent.com/u/3380462 + title: |- + [{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} + {{- if gt (len .CommonLabels) (len .GroupLabels) -}} + {{" "}}( + {{- with .CommonLabels.Remove .GroupLabels.Names }} + {{- range $index, $label := .SortedPairs -}} + {{ if $index }}, {{ end }} + {{- $label.Name }}="{{ $label.Value -}}" + {{- end }} + {{- end -}} + ) + {{- end }} + text: >- + {{ range .Alerts -}} + *Alert:* {{ .Annotations.title }}{{ if .Labels.severity }} - `{{ .Labels.severity }}`{{ end }} + + *Description:* {{ .Annotations.description }} + + *Details:* + {{ range .Labels.SortedPairs }} • *{{ .Name }}:* `{{ .Value }}` + {{ end }} + {{ end }} \ No newline at end of file diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/alerts.yml b/signoz/deploy/docker-swarm/clickhouse-setup/alerts.yml new file mode 100644 index 0000000..810a207 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/alerts.yml @@ -0,0 +1,11 @@ +groups: +- name: ExampleCPULoadGroup + rules: + - alert: HighCpuLoad + expr: system_cpu_load_average_1m > 0.1 + for: 0m + labels: + severity: warning + annotations: + summary: High CPU load + description: "CPU load is > 0.1\n VALUE = {{ $value }}\n LABELS = {{ $labels }}" diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-cluster.xml 
b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-cluster.xml new file mode 100644 index 0000000..0e3ddcd --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-cluster.xml @@ -0,0 +1,75 @@ + + + + + + zookeeper-1 + 2181 + + + + + + + + + + + + + + + + clickhouse + 9000 + + + + + + + + \ No newline at end of file diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml new file mode 100644 index 0000000..dd2b1bd --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-config.xml @@ -0,0 +1,1139 @@ + + + + + + information + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + + + + 8123 + + + 9000 + + + 9004 + + + 9005 + + + + + + + + + + + + 9009 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + + + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + + + + + + 8589934592 + + + 5368709120 + + + + 1000 + + + 134217728 + + + 10000 + + + /var/lib/clickhouse/ + + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + /var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default + + + + + + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export 
MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. + ]]> + + + + + + + + + + + + + + + + + 3600 + + + + 3600 + + + 60 + + + + + + + + + + + + + system + query_log
+ + toYYYYMM(event_date) + + + + + + 7500 +
+ + + + system + trace_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + part_log
+ toYYYYMM(event_date) + 7500 +
+ + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
+ + + + + system + crash_log
+ + + 1000 +
+ + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + + + + *_dictionary.xml + + + *_function.xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + click_cost + any + + 0 + 3600 + + + 86400 + 60 + + + + max + + 0 + 60 + + + 3600 + 300 + + + 86400 + 3600 + + + + + + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + +
diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml new file mode 100644 index 0000000..54ec497 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-storage.xml @@ -0,0 +1,41 @@ + + + + + + 10485760 + + + s3 + + https://BUCKET-NAME.s3-REGION-NAME.amazonaws.com/data/ + ACCESS-KEY-ID + SECRET-ACCESS-KEY + + + + + + + + + + + default + + + s3 + 0 + + + + + + diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml new file mode 100644 index 0000000..f185620 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/clickhouse-users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/signoz/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml new file mode 100644 index 0000000..798aa12 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -0,0 +1,287 @@ +version: "3.9" + +x-clickhouse-defaults: &clickhouse-defaults + image: clickhouse/clickhouse-server:24.1.2-alpine + tty: true + deploy: + restart_policy: + condition: on-failure + depends_on: + - zookeeper-1 + # - zookeeper-2 + # - zookeeper-3 + logging: + options: + max-size: 50m + max-file: "3" + healthcheck: + # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "0.0.0.0:8123/ping" + ] + interval: 30s + timeout: 5s + retries: 3 + ulimits: + nproc: 65535 + nofile: + soft: 262144 + hard: 262144 + +x-db-depend: &db-depend + depends_on: + - clickhouse + - otel-collector-migrator + # - clickhouse-2 + # - clickhouse-3 + + +services: + 
zookeeper-1: + image: bitnami/zookeeper:3.7.1 + hostname: zookeeper-1 + user: root + ports: + - "2181:2181" + - "2888:2888" + - "3888:3888" + volumes: + - ./data/zookeeper-1:/bitnami/zookeeper + environment: + - ZOO_SERVER_ID=1 + # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888 + - ALLOW_ANONYMOUS_LOGIN=yes + - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-2: + # image: bitnami/zookeeper:3.7.0 + # hostname: zookeeper-2 + # user: root + # ports: + # - "2182:2181" + # - "2889:2888" + # - "3889:3888" + # volumes: + # - ./data/zookeeper-2:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=2 + # - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-3: + # image: bitnami/zookeeper:3.7.0 + # hostname: zookeeper-3 + # user: root + # ports: + # - "2183:2181" + # - "2890:2888" + # - "3890:3888" + # volumes: + # - ./data/zookeeper-3:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=3 + # - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + clickhouse: + <<: *clickhouse-defaults + hostname: clickhouse + # ports: + # - "9000:9000" + # - "8123:8123" + # - "9181:9181" + volumes: + - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + - ./data/clickhouse/:/var/lib/clickhouse/ + + # clickhouse-2: + # <<: *clickhouse-defaults + # hostname: clickhouse-2 + # ports: + # - "9001:9000" + # - "8124:8123" + # - "9182:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - 
./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-2/:/var/lib/clickhouse/ + + # clickhouse-3: + # <<: *clickhouse-defaults + # hostname: clickhouse-3 + # ports: + # - "9002:9000" + # - "8125:8123" + # - "9183:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-3/:/var/lib/clickhouse/ + + alertmanager: + image: signoz/alertmanager:0.23.5 + volumes: + - ./data/alertmanager:/data + command: + - --queryService.url=http://query-service:8085 + - --storage.path=/data + depends_on: + - query-service + deploy: + restart_policy: + condition: on-failure + + query-service: + image: signoz/query-service:0.51.0 + command: + [ + "-config=/root/config/prometheus.yml", + # "--prefer-delta=true" + ] + # ports: + # - "6060:6060" # pprof port + # - "8080:8080" # query-service port + volumes: + - ./prometheus.yml:/root/config/prometheus.yml + - ../dashboards:/root/config/dashboards + - ./data/signoz/:/var/lib/signoz/ + environment: + - ClickHouseUrl=tcp://clickhouse:9000 + - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/ + - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db + - DASHBOARDS_PATH=/root/config/dashboards + - STORAGE=clickhouse + - GODEBUG=netdns=go + - TELEMETRY_ENABLED=true + - DEPLOYMENT_TYPE=docker-swarm + healthcheck: + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "localhost:8080/api/v1/health" + ] + interval: 30s + timeout: 5s + retries: 3 + deploy: + restart_policy: + condition: on-failure + <<: *db-depend + + frontend: + image: signoz/frontend:0.48.0 + deploy: + restart_policy: + condition: on-failure + depends_on: + - alertmanager + - query-service + ports: + - "3301:3301" + volumes: + - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf + 
+ otel-collector: + image: signoz/signoz-otel-collector:0.102.3 + command: + [ + "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", + "--feature-gates=-pkg.translator.prometheus.NormalizeName" + ] + user: root # required for reading docker container logs + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml + - /var/lib/docker/containers:/var/lib/docker/containers:ro + environment: + - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}} + - DOCKER_MULTI_NODE_CLUSTER=false + - LOW_CARDINAL_EXCEPTION_GROUPING=false + ports: + # - "1777:1777" # pprof extension + - "4317:4317" # OTLP gRPC receiver + - "4318:4318" # OTLP HTTP receiver + # - "8888:8888" # OtelCollector internal metrics + # - "8889:8889" # signoz spanmetrics exposed by the agent + # - "9411:9411" # Zipkin port + # - "13133:13133" # Health check extension + # - "14250:14250" # Jaeger gRPC + # - "14268:14268" # Jaeger thrift HTTP + # - "55678:55678" # OpenCensus receiver + # - "55679:55679" # zPages extension + deploy: + mode: global + restart_policy: + condition: on-failure + depends_on: + - clickhouse + - otel-collector-migrator + - query-service + + otel-collector-migrator: + image: signoz/signoz-schema-migrator:0.102.3 + deploy: + restart_policy: + condition: on-failure + delay: 5s + command: + - "--dsn=tcp://clickhouse:9000" + depends_on: + - clickhouse + # - clickhouse-2 + # - clickhouse-3 + + logspout: + image: "gliderlabs/logspout:v3.2.14" + volumes: + - /etc/hostname:/etc/host_hostname:ro + - /var/run/docker.sock:/var/run/docker.sock + command: syslog+tcp://otel-collector:2255 + depends_on: + - otel-collector + deploy: + mode: global + restart_policy: + condition: on-failure + + hotrod: + image: jaegertracing/example-hotrod:1.30 + command: [ "all" ] + environment: 
+ - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces + logging: + options: + max-size: 50m + max-file: "3" + + load-hotrod: + image: "signoz/locust:1.2.3" + hostname: load-hotrod + environment: + ATTACKED_HOST: http://hotrod:8080 + LOCUST_MODE: standalone + NO_PROXY: standalone + TASK_DELAY_FROM: 5 + TASK_DELAY_TO: 30 + QUIET_MODE: "${QUIET_MODE:-false}" + LOCUST_OPTS: "--headless -u 10 -r 1" + volumes: + - ../common/locust-scripts:/locust diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/docker-entrypoint-initdb.d/init-db.sql b/signoz/deploy/docker-swarm/clickhouse-setup/docker-entrypoint-initdb.d/init-db.sql new file mode 100644 index 0000000..f71983c --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/docker-entrypoint-initdb.d/init-db.sql @@ -0,0 +1,31 @@ +CREATE TABLE IF NOT EXISTS signoz_index ( + timestamp DateTime64(9) CODEC(Delta, ZSTD(1)), + traceID String CODEC(ZSTD(1)), + spanID String CODEC(ZSTD(1)), + parentSpanID String CODEC(ZSTD(1)), + serviceName LowCardinality(String) CODEC(ZSTD(1)), + name LowCardinality(String) CODEC(ZSTD(1)), + kind Int32 CODEC(ZSTD(1)), + durationNano UInt64 CODEC(ZSTD(1)), + tags Array(String) CODEC(ZSTD(1)), + tagsKeys Array(String) CODEC(ZSTD(1)), + tagsValues Array(String) CODEC(ZSTD(1)), + statusCode Int64 CODEC(ZSTD(1)), + references String CODEC(ZSTD(1)), + externalHttpMethod Nullable(String) CODEC(ZSTD(1)), + externalHttpUrl Nullable(String) CODEC(ZSTD(1)), + component Nullable(String) CODEC(ZSTD(1)), + dbSystem Nullable(String) CODEC(ZSTD(1)), + dbName Nullable(String) CODEC(ZSTD(1)), + dbOperation Nullable(String) CODEC(ZSTD(1)), + peerService Nullable(String) CODEC(ZSTD(1)), + INDEX idx_traceID traceID TYPE bloom_filter GRANULARITY 4, + INDEX idx_service serviceName TYPE bloom_filter GRANULARITY 4, + INDEX idx_name name TYPE bloom_filter GRANULARITY 4, + INDEX idx_kind kind TYPE minmax GRANULARITY 4, + INDEX idx_tagsKeys tagsKeys TYPE bloom_filter(0.01) GRANULARITY 64, + INDEX 
idx_tagsValues tagsValues TYPE bloom_filter(0.01) GRANULARITY 64, + INDEX idx_duration durationNano TYPE minmax GRANULARITY 1 +) ENGINE MergeTree() +PARTITION BY toDate(timestamp) +ORDER BY (serviceName, -toUnixTimestamp(timestamp)) \ No newline at end of file diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml b/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml new file mode 100644 index 0000000..13ab19c --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-config.yaml @@ -0,0 +1,179 @@ +receivers: + tcplog/docker: + listen_address: "0.0.0.0:2255" + operators: + - type: regex_parser + regex: '^<([0-9]+)>[0-9]+ (?P[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P\S+) (?P\S+) [0-9]+ - -( (?P.*))?' + timestamp: + parse_from: attributes.timestamp + layout: '%Y-%m-%dT%H:%M:%S.%LZ' + - type: move + from: attributes["body"] + to: body + - type: remove + field: attributes.timestamp + # please remove names from below if you want to collect logs from them + - type: filter + id: signoz_logs_filter + expr: 'attributes.container_name matches "^signoz_(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"' + opencensus: + endpoint: 0.0.0.0:55678 + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + jaeger: + protocols: + grpc: + endpoint: 0.0.0.0:14250 + thrift_http: + endpoint: 0.0.0.0:14268 + # thrift_compact: + # endpoint: 0.0.0.0:6831 + # thrift_binary: + # endpoint: 0.0.0.0:6832 + hostmetrics: + collection_interval: 30s + scrapers: + cpu: {} + load: {} + memory: {} + disk: {} + filesystem: {} + network: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + # otel-collector internal metrics + - job_name: otel-collector + static_configs: + - targets: + - localhost:8888 + labels: + job_name: otel-collector + +processors: + batch: + send_batch_size: 
10000 + send_batch_max_size: 11000 + timeout: 10s + resourcedetection: + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure. + timeout: 2s + signozspanmetrics/cumulative: + metrics_exporter: clickhousemetricswrite + latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] + dimensions_cache_size: 100000 + dimensions: + - name: service.namespace + default: default + - name: deployment.environment + default: default + # This is added to ensure the uniqueness of the timeseries + # Otherwise, identical timeseries produced by multiple replicas of + # collectors result in incorrect APM metrics + - name: signoz.collector.id + - name: service.version + - name: browser.platform + - name: browser.mobile + - name: k8s.cluster.name + - name: k8s.node.name + - name: k8s.namespace.name + - name: host.name + - name: host.type + - name: container.name + # memory_limiter: + # # 80% of maximum memory up to 2G + # limit_mib: 1500 + # # 25% of limit up to 2G + # spike_limit_mib: 512 + # check_interval: 5s + # + # # 50% of the maximum memory + # limit_percentage: 50 + # # 20% of max memory usage spike expected + # spike_limit_percentage: 20 + # queued_retry: + # num_workers: 4 + # queue_size: 100 + # retry_on_failure: true + signozspanmetrics/delta: + metrics_exporter: clickhousemetricswrite + latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] + dimensions_cache_size: 100000 + aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA + enable_exp_histogram: true + dimensions: + - name: service.namespace + default: default + - name: deployment.environment + default: default + # This is added to ensure the uniqueness of the timeseries + # Otherwise, identical timeseries produced by multiple replicas of + # collectors result in 
incorrect APM metrics + - name: signoz.collector.id + - name: service.version + - name: browser.platform + - name: browser.mobile + - name: k8s.cluster.name + - name: k8s.node.name + - name: k8s.namespace.name + - name: host.name + - name: host.type + - name: container.name + +exporters: + clickhousetraces: + datasource: tcp://clickhouse:9000/signoz_traces + docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} + low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING} + clickhousemetricswrite: + endpoint: tcp://clickhouse:9000/signoz_metrics + resource_to_telemetry_conversion: + enabled: true + clickhousemetricswrite/prometheus: + endpoint: tcp://clickhouse:9000/signoz_metrics + # logging: {} + clickhouselogsexporter: + dsn: tcp://clickhouse:9000/signoz_logs + docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} + timeout: 10s +extensions: + health_check: + endpoint: 0.0.0.0:13133 + zpages: + endpoint: 0.0.0.0:55679 + pprof: + endpoint: 0.0.0.0:1777 + +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: [health_check, zpages, pprof] + pipelines: + traces: + receivers: [jaeger, otlp] + processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch] + exporters: [clickhousetraces] + metrics: + receivers: [otlp] + processors: [batch] + exporters: [clickhousemetricswrite] + metrics/generic: + receivers: [hostmetrics] + processors: [resourcedetection, batch] + exporters: [clickhousemetricswrite] + metrics/prometheus: + receivers: [prometheus] + processors: [batch] + exporters: [clickhousemetricswrite/prometheus] + logs: + receivers: [otlp, tcplog/docker] + processors: [batch] + exporters: [clickhouselogsexporter] diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml b/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml new file mode 100644 index 0000000..e408b55 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml @@ 
-0,0 +1 @@ +server_endpoint: ws://query-service:4320/v1/opamp diff --git a/signoz/deploy/docker-swarm/clickhouse-setup/prometheus.yml b/signoz/deploy/docker-swarm/clickhouse-setup/prometheus.yml new file mode 100644 index 0000000..d7c5289 --- /dev/null +++ b/signoz/deploy/docker-swarm/clickhouse-setup/prometheus.yml @@ -0,0 +1,25 @@ +# my global config +global: + scrape_interval: 5s # Set the scrape interval to every 15 seconds. Default is every 1 minute. + evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute. + # scrape_timeout is set to the global default (10s). + +# Alertmanager configuration +alerting: + alertmanagers: + - static_configs: + - targets: + - alertmanager:9093 + +# Load rules once and periodically evaluate them according to the global 'evaluation_interval'. +rule_files: + # - "first_rules.yml" + # - "second_rules.yml" + - 'alerts.yml' + +# A scrape configuration containing exactly one endpoint to scrape: +# Here it's Prometheus itself. 
+scrape_configs: [] + +remote_read: + - url: tcp://clickhouse:9000/signoz_metrics diff --git a/signoz/deploy/docker-swarm/common/locust-scripts/locustfile.py b/signoz/deploy/docker-swarm/common/locust-scripts/locustfile.py new file mode 100644 index 0000000..0b51820 --- /dev/null +++ b/signoz/deploy/docker-swarm/common/locust-scripts/locustfile.py @@ -0,0 +1,16 @@ +from locust import HttpUser, task, between +class UserTasks(HttpUser): + wait_time = between(5, 15) + + @task + def rachel(self): + self.client.get("/dispatch?customer=123&nonse=0.6308392664170006") + @task + def trom(self): + self.client.get("/dispatch?customer=392&nonse=0.015296363321630757") + @task + def japanese(self): + self.client.get("/dispatch?customer=731&nonse=0.8022286220408668") + @task + def coffee(self): + self.client.get("/dispatch?customer=567&nonse=0.0022220379420636593") diff --git a/signoz/deploy/docker-swarm/common/nginx-config.conf b/signoz/deploy/docker-swarm/common/nginx-config.conf new file mode 100644 index 0000000..f7943e2 --- /dev/null +++ b/signoz/deploy/docker-swarm/common/nginx-config.conf @@ -0,0 +1,51 @@ +server { + listen 3301; + server_name _; + + gzip on; + gzip_static on; + gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + gzip_proxied any; + gzip_vary on; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + + # to handle uri issue 414 from nginx + client_max_body_size 24M; + large_client_header_buffers 8 128k; + + location / { + if ( $uri = '/index.html' ) { + add_header Cache-Control no-store always; + } + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html; + } + + location ~ ^/api/(v1|v3)/logs/(tail|livetail){ + proxy_pass http://query-service:8080; + proxy_http_version 1.1; + + # connection will be closed if no data is read for 600s between successive read operations + proxy_read_timeout 600s; + + # dont buffer the data 
send it directly to client. + proxy_buffering off; + proxy_cache off; + } + + location /api { + proxy_pass http://query-service:8080/api; + # connection will be closed if no data is read for 600s between successive read operations + proxy_read_timeout 600s; + } + + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +} \ No newline at end of file diff --git a/signoz/deploy/docker-swarm/dashboards/.gitkeep b/signoz/deploy/docker-swarm/dashboards/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/signoz/deploy/docker/clickhouse-setup/alertmanager.yml b/signoz/deploy/docker/clickhouse-setup/alertmanager.yml new file mode 100644 index 0000000..d69357f --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/alertmanager.yml @@ -0,0 +1,35 @@ +global: + resolve_timeout: 1m + slack_api_url: 'https://hooks.slack.com/services/xxx' + +route: + receiver: 'slack-notifications' + +receivers: +- name: 'slack-notifications' + slack_configs: + - channel: '#alerts' + send_resolved: true + icon_url: https://avatars3.githubusercontent.com/u/3380462 + title: |- + [{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} + {{- if gt (len .CommonLabels) (len .GroupLabels) -}} + {{" "}}( + {{- with .CommonLabels.Remove .GroupLabels.Names }} + {{- range $index, $label := .SortedPairs -}} + {{ if $index }}, {{ end }} + {{- $label.Name }}="{{ $label.Value -}}" + {{- end }} + {{- end -}} + ) + {{- end }} + text: >- + {{ range .Alerts -}} + *Alert:* {{ .Annotations.title }}{{ if .Labels.severity }} - `{{ .Labels.severity }}`{{ end }} + + *Description:* {{ .Annotations.description }} + + *Details:* + {{ range .Labels.SortedPairs }} • *{{ .Name }}:* `{{ .Value }}` + {{ end }} + {{ end }} \ No newline at end of file diff --git a/signoz/deploy/docker/clickhouse-setup/alerts.yml 
b/signoz/deploy/docker/clickhouse-setup/alerts.yml new file mode 100644 index 0000000..810a207 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/alerts.yml @@ -0,0 +1,11 @@ +groups: +- name: ExampleCPULoadGroup + rules: + - alert: HighCpuLoad + expr: system_cpu_load_average_1m > 0.1 + for: 0m + labels: + severity: warning + annotations: + summary: High CPU load + description: "CPU load is > 0.1\n VALUE = {{ $value }}\n LABELS = {{ $labels }}" diff --git a/signoz/deploy/docker/clickhouse-setup/clickhouse-cluster.xml b/signoz/deploy/docker/clickhouse-setup/clickhouse-cluster.xml new file mode 100644 index 0000000..0e3ddcd --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/clickhouse-cluster.xml @@ -0,0 +1,75 @@ + + + + + + zookeeper-1 + 2181 + + + + + + + + + + + + + + + + clickhouse + 9000 + + + + + + + + \ No newline at end of file diff --git a/signoz/deploy/docker/clickhouse-setup/clickhouse-config.xml b/signoz/deploy/docker/clickhouse-setup/clickhouse-config.xml new file mode 100644 index 0000000..f8213b6 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/clickhouse-config.xml @@ -0,0 +1,1140 @@ + + + + + + information + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + + 1000M + 10 + + + + + + + + + + + + + + + + + + 8123 + + + 9000 + + + 9004 + + + 9005 + + + + + + + + + + + + 9009 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4096 + + + 3 + + + + + false + + + /path/to/ssl_cert_file + /path/to/ssl_key_file + + + false + + + /path/to/ssl_ca_cert_file + + + none + + + 0 + + + -1 + -1 + + + false + + + + + + + + + + + none + true + true + sslv2,sslv3 + true + + + + true + true + sslv2,sslv3 + true + + + + RejectCertificateHandler + + + + + + + + + 100 + + + 0 + + + + 10000 + + + + + + 0.9 + + + 4194304 + + + 0 + + + + + + 8589934592 + + + 5368709120 + + + + 1000 + + + 134217728 + + + 10000 + + + /var/lib/clickhouse/ + + + /var/lib/clickhouse/tmp/ + + + + ` + + + + + + 
/var/lib/clickhouse/user_files/ + + + + + + + + + + + + + users.xml + + + + /var/lib/clickhouse/access/ + + + + + + + default + + + + + + + + + + + + default + + + + + + + + + true + + + false + + ' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + apt install --no-install-recommends -f ./clickhouse-jdbc-bridge_$PKG_VER-1_all.deb + clickhouse-jdbc-bridge & + + * [CentOS/RHEL] + export MVN_URL=https://repo1.maven.org/maven2/ru/yandex/clickhouse/clickhouse-jdbc-bridge + export PKG_VER=$(curl -sL $MVN_URL/maven-metadata.xml | grep '' | sed -e 's|.*>\(.*\)<.*|\1|') + wget https://github.com/ClickHouse/clickhouse-jdbc-bridge/releases/download/v$PKG_VER/clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + yum localinstall -y clickhouse-jdbc-bridge-$PKG_VER-1.noarch.rpm + clickhouse-jdbc-bridge & + + Please refer to https://github.com/ClickHouse/clickhouse-jdbc-bridge#usage for more information. + ]]> + + + + + + + + + + + + + + + + + 3600 + + + + 3600 + + + 60 + + + + + + + + + + + + + system + query_log
+ + toYYYYMM(event_date) + + + + + + 7500 +
+ + + + system + trace_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + system + query_thread_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + query_views_log
+ toYYYYMM(event_date) + 7500 +
+ + + + system + part_log
+ toYYYYMM(event_date) + 7500 +
+ + + + + + system + metric_log
+ 7500 + 1000 +
+ + + + system + asynchronous_metric_log
+ + 7000 +
+ + + + + + engine MergeTree + partition by toYYYYMM(finish_date) + order by (finish_date, finish_time_us, trace_id) + + system + opentelemetry_span_log
+ 7500 +
+ + + + + system + crash_log
+ + + 1000 +
+ + + + + + + system + processors_profile_log
+ + toYYYYMM(event_date) + 7500 +
+ + + + + + + + + *_dictionary.xml + + + *function.xml + /var/lib/clickhouse/user_scripts/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + /clickhouse/task_queue/ddl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + click_cost + any + + 0 + 3600 + + + 86400 + 60 + + + + max + + 0 + 60 + + + 3600 + 300 + + + 86400 + 3600 + + + + + + /var/lib/clickhouse/format_schemas/ + + + + + hide encrypt/decrypt arguments + ((?:aes_)?(?:encrypt|decrypt)(?:_mysql)?)\s*\(\s*(?:'(?:\\'|.)+'|.*?)\s*\) + + \1(???) + + + + + + + + + + false + + false + + + https://6f33034cfe684dd7a3ab9875e57b1c8d@o388870.ingest.sentry.io/5226277 + + + + + + + + + + + 268435456 + true + +
diff --git a/signoz/deploy/docker/clickhouse-setup/clickhouse-storage.xml b/signoz/deploy/docker/clickhouse-setup/clickhouse-storage.xml new file mode 100644 index 0000000..54ec497 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/clickhouse-storage.xml @@ -0,0 +1,41 @@ + + + + + + 10485760 + + + s3 + + https://BUCKET-NAME.s3-REGION-NAME.amazonaws.com/data/ + ACCESS-KEY-ID + SECRET-ACCESS-KEY + + + + + + + + + + + default + + + s3 + 0 + + + + + + diff --git a/signoz/deploy/docker/clickhouse-setup/clickhouse-users.xml b/signoz/deploy/docker/clickhouse-setup/clickhouse-users.xml new file mode 100644 index 0000000..f185620 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/clickhouse-users.xml @@ -0,0 +1,123 @@ + + + + + + + + + + 10000000000 + + + random + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/signoz/deploy/docker/clickhouse-setup/custom-function.xml b/signoz/deploy/docker/clickhouse-setup/custom-function.xml new file mode 100644 index 0000000..b2b3f91 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/custom-function.xml @@ -0,0 +1,21 @@ + + + executable + histogramQuantile + Float64 + + Array(Float64) + buckets + + + Array(Float64) + counts + + + Float64 + quantile + + CSV + ./histogramQuantile + + diff --git a/signoz/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/signoz/deploy/docker/clickhouse-setup/docker-compose-core.yaml new file mode 100644 index 0000000..470cc7e --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/docker-compose-core.yaml @@ -0,0 +1,154 @@ +version: "2.4" + +services: + zookeeper-1: + image: bitnami/zookeeper:3.7.1 + container_name: signoz-zookeeper-1 + hostname: zookeeper-1 + user: root + ports: + - "2181:2181" + - "2888:2888" + - "3888:3888" + volumes: + - ./data/zookeeper-1:/bitnami/zookeeper + environment: + - ZOO_SERVER_ID=1 + # - 
ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888 + - ALLOW_ANONYMOUS_LOGIN=yes + - ZOO_AUTOPURGE_INTERVAL=1 + + clickhouse: + image: clickhouse/clickhouse-server:24.1.2-alpine + container_name: signoz-clickhouse + # ports: + # - "9000:9000" + # - "8123:8123" + tty: true + volumes: + - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + - ./data/clickhouse/:/var/lib/clickhouse/ + - ./user_scripts:/var/lib/clickhouse/user_scripts/ + restart: on-failure + logging: + options: + max-size: 50m + max-file: "3" + healthcheck: + # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "0.0.0.0:8123/ping" + ] + interval: 30s + timeout: 5s + retries: 3 + + alertmanager: + container_name: signoz-alertmanager + image: signoz/alertmanager:0.23.5 + volumes: + - ./data/alertmanager:/data + depends_on: + query-service: + condition: service_healthy + restart: on-failure + command: + - --queryService.url=http://query-service:8085 + - --storage.path=/data + + otel-collector-migrator: + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3} + container_name: otel-migrator + command: + - "--dsn=tcp://clickhouse:9000" + depends_on: + clickhouse: + condition: service_healthy + # clickhouse-2: + # condition: service_healthy + # clickhouse-3: + # condition: service_healthy + + # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` + otel-collector: + container_name: signoz-otel-collector + image: signoz/signoz-otel-collector:0.102.3 + command: + [ + "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", + "--copy-path=/var/tmp/collector-config.yaml", + "--feature-gates=-pkg.translator.prometheus.NormalizeName" + ] + # user: root # required for reading docker container logs + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml + environment: + - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux + ports: + # - "1777:1777" # pprof extension + - "4317:4317" # OTLP gRPC receiver + - "4318:4318" # OTLP HTTP receiver + # - "8888:8888" # OtelCollector internal metrics + # - "8889:8889" # signoz spanmetrics exposed by the agent + # - "9411:9411" # Zipkin port + # - "13133:13133" # health check extension + # - "14250:14250" # Jaeger gRPC + # - "14268:14268" # Jaeger thrift HTTP + # - "55678:55678" # OpenCensus receiver + # - "55679:55679" # zPages extension + restart: on-failure + depends_on: + clickhouse: + condition: service_healthy + otel-collector-migrator: + condition: service_completed_successfully + query-service: + condition: service_healthy + + logspout: + image: "gliderlabs/logspout:v3.2.14" + container_name: signoz-logspout + volumes: + - /etc/hostname:/etc/host_hostname:ro + - /var/run/docker.sock:/var/run/docker.sock + command: syslog+tcp://otel-collector:2255 + depends_on: + - otel-collector + restart: on-failure + + hotrod: + image: jaegertracing/example-hotrod:1.30 + container_name: hotrod + logging: + options: + max-size: 50m + max-file: "3" + command: [ "all" ] + environment: + - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces + + load-hotrod: + image: "signoz/locust:1.2.3" + container_name: load-hotrod + hostname: load-hotrod + environment: + ATTACKED_HOST: 
http://hotrod:8080 + LOCUST_MODE: standalone + NO_PROXY: standalone + TASK_DELAY_FROM: 5 + TASK_DELAY_TO: 30 + QUIET_MODE: "${QUIET_MODE:-false}" + LOCUST_OPTS: "--headless -u 10 -r 1" + volumes: + - ../common/locust-scripts:/locust diff --git a/signoz/deploy/docker/clickhouse-setup/docker-compose-local.yaml b/signoz/deploy/docker/clickhouse-setup/docker-compose-local.yaml new file mode 100644 index 0000000..248c7bf --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/docker-compose-local.yaml @@ -0,0 +1,67 @@ +version: "2.4" + +services: + query-service: + hostname: query-service + build: + context: "../../../" + dockerfile: "./pkg/query-service/Dockerfile" + args: + LDFLAGS: "" + TARGETPLATFORM: "${GOOS}/${GOARCH}" + container_name: signoz-query-service + environment: + - ClickHouseUrl=tcp://clickhouse:9000 + - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/ + - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db + - DASHBOARDS_PATH=/root/config/dashboards + - STORAGE=clickhouse + - GODEBUG=netdns=go + - TELEMETRY_ENABLED=true + volumes: + - ./prometheus.yml:/root/config/prometheus.yml + - ../dashboards:/root/config/dashboards + - ./data/signoz/:/var/lib/signoz/ + command: + [ + "-config=/root/config/prometheus.yml", + # "--prefer-delta=true" + ] + ports: + - "6060:6060" + - "8080:8080" + restart: on-failure + healthcheck: + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "localhost:8080/api/v1/health" + ] + interval: 30s + timeout: 5s + retries: 3 + depends_on: + clickhouse: + condition: service_healthy + + frontend: + build: + context: "../../../frontend" + dockerfile: "./Dockerfile" + args: + TARGETOS: "${GOOS}" + TARGETPLATFORM: "${GOARCH}" + container_name: signoz-frontend + environment: + - FRONTEND_API_ENDPOINT=http://query-service:8080 + restart: on-failure + depends_on: + - alertmanager + - query-service + ports: + - "3301:3301" + volumes: + - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf diff --git 
a/signoz/deploy/docker/clickhouse-setup/docker-compose.testing.yaml b/signoz/deploy/docker/clickhouse-setup/docker-compose.testing.yaml new file mode 100644 index 0000000..02231d7 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/docker-compose.testing.yaml @@ -0,0 +1,307 @@ +version: "2.4" + +x-clickhouse-defaults: &clickhouse-defaults + restart: on-failure + # addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab + image: clickhouse/clickhouse-server:24.1.2-alpine + tty: true + depends_on: + - zookeeper-1 + # - zookeeper-2 + # - zookeeper-3 + logging: + options: + max-size: 50m + max-file: "3" + healthcheck: + # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "0.0.0.0:8123/ping" + ] + interval: 30s + timeout: 5s + retries: 3 + ulimits: + nproc: 65535 + nofile: + soft: 262144 + hard: 262144 + +x-db-depend: &db-depend + depends_on: + clickhouse: + condition: service_healthy + otel-collector-migrator: + condition: service_completed_successfully + # clickhouse-2: + # condition: service_healthy + # clickhouse-3: + # condition: service_healthy + +services: + + zookeeper-1: + image: bitnami/zookeeper:3.7.1 + container_name: signoz-zookeeper-1 + hostname: zookeeper-1 + user: root + ports: + - "2181:2181" + - "2888:2888" + - "3888:3888" + volumes: + - ./data/zookeeper-1:/bitnami/zookeeper + environment: + - ZOO_SERVER_ID=1 + # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888 + - ALLOW_ANONYMOUS_LOGIN=yes + - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-2: + # image: bitnami/zookeeper:3.7.0 + # container_name: signoz-zookeeper-2 + # hostname: zookeeper-2 + # user: root + # ports: + # - "2182:2181" + # - "2889:2888" + # - "3889:3888" + # volumes: + # - ./data/zookeeper-2:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=2 + # - 
ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-3: + # image: bitnami/zookeeper:3.7.0 + # container_name: signoz-zookeeper-3 + # hostname: zookeeper-3 + # user: root + # ports: + # - "2183:2181" + # - "2890:2888" + # - "3890:3888" + # volumes: + # - ./data/zookeeper-3:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=3 + # - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + clickhouse: + <<: *clickhouse-defaults + container_name: signoz-clickhouse + hostname: clickhouse + ports: + - "9000:9000" + - "8123:8123" + - "9181:9181" + volumes: + - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + - ./data/clickhouse/:/var/lib/clickhouse/ + - ./user_scripts:/var/lib/clickhouse/user_scripts/ + + # clickhouse-2: + # <<: *clickhouse-defaults + # container_name: signoz-clickhouse-2 + # hostname: clickhouse-2 + # ports: + # - "9001:9000" + # - "8124:8123" + # - "9182:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-2/:/var/lib/clickhouse/ + # - ./user_scripts:/var/lib/clickhouse/user_scripts/ + + + # clickhouse-3: + # <<: *clickhouse-defaults + # container_name: signoz-clickhouse-3 + # hostname: clickhouse-3 + # ports: + # - "9002:9000" + # - "8125:8123" + # - 
"9183:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-3/:/var/lib/clickhouse/ + # - ./user_scripts:/var/lib/clickhouse/user_scripts/ + + alertmanager: + image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5} + container_name: signoz-alertmanager + volumes: + - ./data/alertmanager:/data + depends_on: + query-service: + condition: service_healthy + restart: on-failure + command: + - --queryService.url=http://query-service:8085 + - --storage.path=/data + + # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` + + query-service: + image: signoz/query-service:${DOCKER_TAG:-0.51.0} + container_name: signoz-query-service + command: + [ + "-config=/root/config/prometheus.yml", + "-gateway-url=https://api.staging.signoz.cloud" + # "--prefer-delta=true" + ] + # ports: + # - "6060:6060" # pprof port + # - "8080:8080" # query-service port + volumes: + - ./prometheus.yml:/root/config/prometheus.yml + - ../dashboards:/root/config/dashboards + - ./data/signoz/:/var/lib/signoz/ + environment: + - ClickHouseUrl=tcp://clickhouse:9000 + - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/ + - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db + - DASHBOARDS_PATH=/root/config/dashboards + - STORAGE=clickhouse + - GODEBUG=netdns=go + - TELEMETRY_ENABLED=true + - DEPLOYMENT_TYPE=docker-standalone-amd + restart: on-failure + healthcheck: + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "localhost:8080/api/v1/health" + ] + interval: 30s + timeout: 5s + retries: 3 + <<: *db-depend + + frontend: + image: 
signoz/frontend:${DOCKER_TAG:-0.51.0} + container_name: signoz-frontend + restart: on-failure + depends_on: + - alertmanager + - query-service + ports: + - "3301:3301" + volumes: + - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf + + otel-collector-migrator: + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3} + container_name: otel-migrator + command: + - "--dsn=tcp://clickhouse:9000" + depends_on: + clickhouse: + condition: service_healthy + # clickhouse-2: + # condition: service_healthy + # clickhouse-3: + # condition: service_healthy + + + otel-collector: + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3} + container_name: signoz-otel-collector + command: + [ + "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", + "--copy-path=/var/tmp/collector-config.yaml", + "--feature-gates=-pkg.translator.prometheus.NormalizeName" + ] + user: root # required for reading docker container logs + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml + - /var/lib/docker/containers:/var/lib/docker/containers:ro + environment: + - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux + - DOCKER_MULTI_NODE_CLUSTER=false + - LOW_CARDINAL_EXCEPTION_GROUPING=false + ports: + # - "1777:1777" # pprof extension + - "4317:4317" # OTLP gRPC receiver + - "4318:4318" # OTLP HTTP receiver + # - "8888:8888" # OtelCollector internal metrics + # - "8889:8889" # signoz spanmetrics exposed by the agent + # - "9411:9411" # Zipkin port + # - "13133:13133" # health check extension + # - "14250:14250" # Jaeger gRPC + # - "14268:14268" # Jaeger thrift HTTP + # - "55678:55678" # OpenCensus receiver + # - "55679:55679" # zPages extension + restart: on-failure + depends_on: + clickhouse: + condition: service_healthy + otel-collector-migrator: + condition: service_completed_successfully + query-service: + condition: service_healthy + + 
logspout: + image: "gliderlabs/logspout:v3.2.14" + container_name: signoz-logspout + volumes: + - /etc/hostname:/etc/host_hostname:ro + - /var/run/docker.sock:/var/run/docker.sock + command: syslog+tcp://otel-collector:2255 + depends_on: + - otel-collector + restart: on-failure + + hotrod: + image: jaegertracing/example-hotrod:1.30 + container_name: hotrod + logging: + options: + max-size: 50m + max-file: "3" + command: [ "all" ] + environment: + - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces + + load-hotrod: + image: "signoz/locust:1.2.3" + container_name: load-hotrod + hostname: load-hotrod + environment: + ATTACKED_HOST: http://hotrod:8080 + LOCUST_MODE: standalone + NO_PROXY: standalone + TASK_DELAY_FROM: 5 + TASK_DELAY_TO: 30 + QUIET_MODE: "${QUIET_MODE:-false}" + LOCUST_OPTS: "--headless -u 10 -r 1" + volumes: + - ../common/locust-scripts:/locust diff --git a/signoz/deploy/docker/clickhouse-setup/docker-compose.yaml b/signoz/deploy/docker/clickhouse-setup/docker-compose.yaml new file mode 100644 index 0000000..be7f2ba --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -0,0 +1,283 @@ +version: "2.4" + +x-clickhouse-defaults: &clickhouse-defaults + restart: on-failure + # addding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab + image: clickhouse/clickhouse-server:24.1.2-alpine + tty: true + depends_on: + - zookeeper-1 + # - zookeeper-2 + # - zookeeper-3 + logging: + options: + max-size: 50m + max-file: "3" + healthcheck: + # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'" + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "0.0.0.0:8123/ping" + ] + interval: 30s + timeout: 5s + retries: 3 + ulimits: + nproc: 65535 + nofile: + soft: 262144 + hard: 262144 + +x-db-depend: &db-depend + depends_on: + clickhouse: + condition: service_healthy + otel-collector-migrator: + condition: 
service_completed_successfully + # clickhouse-2: + # condition: service_healthy + # clickhouse-3: + # condition: service_healthy + +services: + + zookeeper-1: + image: bitnami/zookeeper:3.7.1 + container_name: signoz-zookeeper-1 + hostname: zookeeper-1 + user: root + ports: + - "2181:2181" + - "2888:2888" + - "3888:3888" + volumes: + - ./data/zookeeper-1:/bitnami/zookeeper + environment: + - ZOO_SERVER_ID=1 + # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888 + - ALLOW_ANONYMOUS_LOGIN=yes + - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-2: + # image: bitnami/zookeeper:3.7.0 + # container_name: signoz-zookeeper-2 + # hostname: zookeeper-2 + # user: root + # ports: + # - "2182:2181" + # - "2889:2888" + # - "3889:3888" + # volumes: + # - ./data/zookeeper-2:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=2 + # - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + # zookeeper-3: + # image: bitnami/zookeeper:3.7.0 + # container_name: signoz-zookeeper-3 + # hostname: zookeeper-3 + # user: root + # ports: + # - "2183:2181" + # - "2890:2888" + # - "3890:3888" + # volumes: + # - ./data/zookeeper-3:/bitnami/zookeeper + # environment: + # - ZOO_SERVER_ID=3 + # - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888 + # - ALLOW_ANONYMOUS_LOGIN=yes + # - ZOO_AUTOPURGE_INTERVAL=1 + + clickhouse: + <<: *clickhouse-defaults + container_name: signoz-clickhouse + hostname: clickhouse + ports: + - "9001:9000" + - "8123:8123" + - "9181:9181" + volumes: + - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + - ./data/clickhouse/:/var/lib/clickhouse/ + - 
./user_scripts:/var/lib/clickhouse/user_scripts/ + + # clickhouse-2: + # <<: *clickhouse-defaults + # container_name: signoz-clickhouse-2 + # hostname: clickhouse-2 + # ports: + # - "9001:9000" + # - "8124:8123" + # - "9182:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-2/:/var/lib/clickhouse/ + # - ./user_scripts:/var/lib/clickhouse/user_scripts/ + + + # clickhouse-3: + # <<: *clickhouse-defaults + # container_name: signoz-clickhouse-3 + # hostname: clickhouse-3 + # ports: + # - "9002:9000" + # - "8125:8123" + # - "9183:9181" + # volumes: + # - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml + # - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml + # - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml + # - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml + # # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml + # - ./data/clickhouse-3/:/var/lib/clickhouse/ + # - ./user_scripts:/var/lib/clickhouse/user_scripts/ + + alertmanager: + image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5} + container_name: signoz-alertmanager + volumes: + - ./data/alertmanager:/data + depends_on: + query-service: + condition: service_healthy + restart: on-failure + command: + - --queryService.url=http://query-service:8085 + - --storage.path=/data + + # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. 
Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md` + + query-service: + image: signoz/query-service:${DOCKER_TAG:-0.51.0} + container_name: signoz-query-service + command: + [ + "-config=/root/config/prometheus.yml" + # "--prefer-delta=true" + ] + # ports: + # - "6060:6060" # pprof port + # - "8080:8080" # query-service port + volumes: + - ./prometheus.yml:/root/config/prometheus.yml + - ../dashboards:/root/config/dashboards + - ./data/signoz/:/var/lib/signoz/ + environment: + - ClickHouseUrl=tcp://clickhouse:9000 + - ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/ + - SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db + - DASHBOARDS_PATH=/root/config/dashboards + - STORAGE=clickhouse + - GODEBUG=netdns=go + - TELEMETRY_ENABLED=true + - DEPLOYMENT_TYPE=docker-standalone-amd + restart: on-failure + healthcheck: + test: + [ + "CMD", + "wget", + "--spider", + "-q", + "localhost:8080/api/v1/health" + ] + interval: 30s + timeout: 5s + retries: 3 + <<: *db-depend + + frontend: + image: signoz/frontend:${DOCKER_TAG:-0.51.0} + container_name: signoz-frontend + restart: on-failure + depends_on: + - alertmanager + - query-service + ports: + - "3301:3301" + volumes: + - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf + + otel-collector-migrator: + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3} + container_name: otel-migrator + command: + - "--dsn=tcp://clickhouse:9000" + depends_on: + clickhouse: + condition: service_healthy + # clickhouse-2: + # condition: service_healthy + # clickhouse-3: + # condition: service_healthy + + + otel-collector: + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3} + container_name: signoz-otel-collector + command: + [ + "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", + "--copy-path=/var/tmp/collector-config.yaml", + "--feature-gates=-pkg.translator.prometheus.NormalizeName" + ] + user: root # required for reading docker container 
logs + volumes: + - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml + - /var/lib/docker/containers:/var/lib/docker/containers:ro + environment: + - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux + - DOCKER_MULTI_NODE_CLUSTER=false + - LOW_CARDINAL_EXCEPTION_GROUPING=false + ports: + # - "1777:1777" # pprof extension + - "4317:4317" # OTLP gRPC receiver + - "4318:4318" # OTLP HTTP receiver + - "8082:8082" + - "54525:54525" + # - "8888:8888" # OtelCollector internal metrics + # - "8889:8889" # signoz spanmetrics exposed by the agent + # - "9411:9411" # Zipkin port + # - "13133:13133" # health check extension + # - "14250:14250" # Jaeger gRPC + # - "14268:14268" # Jaeger thrift HTTP + # - "55678:55678" # OpenCensus receiver + # - "55679:55679" # zPages extension + restart: on-failure + depends_on: + clickhouse: + condition: service_healthy + otel-collector-migrator: + condition: service_completed_successfully + query-service: + condition: service_healthy + + logspout: + image: "gliderlabs/logspout:v3.2.14" + container_name: signoz-logspout + volumes: + - /etc/hostname:/etc/host_hostname:ro + - /var/run/docker.sock:/var/run/docker.sock + command: syslog+tcp://otel-collector:2255 + depends_on: + - otel-collector + restart: on-failure + diff --git a/signoz/deploy/docker/clickhouse-setup/keeper_config.xml b/signoz/deploy/docker/clickhouse-setup/keeper_config.xml new file mode 100644 index 0000000..a9a25c1 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/keeper_config.xml @@ -0,0 +1,64 @@ + + + + information + /var/log/clickhouse-keeper/clickhouse-keeper.log + /var/log/clickhouse-keeper/clickhouse-keeper.err.log + + 1000M + 10 + + + + 0.0.0.0 + 4096 + + + 9181 + + + 1 + + /var/lib/clickhouse/coordination/logs + /var/lib/clickhouse/coordination/snapshots + + + 10000 + 10000 + 100000 + information + false + + + + + true + + + 1 + + + clickhouses-keeper-1 + 9234 + + + + + + + \ 
No newline at end of file diff --git a/signoz/deploy/docker/clickhouse-setup/otel-collector-config.yaml b/signoz/deploy/docker/clickhouse-setup/otel-collector-config.yaml new file mode 100644 index 0000000..e21272e --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/otel-collector-config.yaml @@ -0,0 +1,191 @@ +receivers: + httplogreceiver/json: + endpoint: 0.0.0.0:8082 + source: json + tcplog: + listen_address: "0.0.0.0:54525" + tcplog/docker: + listen_address: "0.0.0.0:2255" + operators: + - type: regex_parser + regex: '^<([0-9]+)>[0-9]+ (?P[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P\S+) (?P\S+) [0-9]+ - -( (?P.*))?' + timestamp: + parse_from: attributes.timestamp + layout: '%Y-%m-%dT%H:%M:%S.%LZ' + - type: move + from: attributes["body"] + to: body + - type: remove + field: attributes.timestamp + # please remove names from below if you want to collect logs from them + - type: filter + id: signoz_logs_filter + expr: 'attributes.container_name matches "^signoz-(logspout|frontend|alertmanager|query-service|otel-collector|clickhouse|zookeeper)"' + opencensus: + endpoint: 0.0.0.0:55678 + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + jaeger: + protocols: + grpc: + endpoint: 0.0.0.0:14250 + thrift_http: + endpoint: 0.0.0.0:14268 + # thrift_compact: + # endpoint: 0.0.0.0:6831 + # thrift_binary: + # endpoint: 0.0.0.0:6832 + hostmetrics: + collection_interval: 30s + scrapers: + cpu: {} + load: {} + memory: {} + disk: {} + filesystem: {} + network: {} + prometheus: + config: + global: + scrape_interval: 60s + scrape_configs: + # otel-collector internal metrics + - job_name: otel-collector + static_configs: + - targets: + - localhost:8888 + labels: + job_name: otel-collector + + +processors: + batch: + send_batch_size: 10000 + send_batch_max_size: 11000 + timeout: 10s + signozspanmetrics/cumulative: + metrics_exporter: clickhousemetricswrite + 
metrics_flush_interval: 60s + latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] + dimensions_cache_size: 100000 + dimensions: + - name: service.namespace + default: default + - name: deployment.environment + default: default + # This is added to ensure the uniqueness of the timeseries + # Otherwise, identical timeseries produced by multiple replicas of + # collectors result in incorrect APM metrics + - name: signoz.collector.id + - name: service.version + - name: browser.platform + - name: browser.mobile + - name: k8s.cluster.name + - name: k8s.node.name + - name: k8s.namespace.name + - name: host.name + - name: host.type + - name: container.name + # memory_limiter: + # # 80% of maximum memory up to 2G + # limit_mib: 1500 + # # 25% of limit up to 2G + # spike_limit_mib: 512 + # check_interval: 5s + # + # # 50% of the maximum memory + # limit_percentage: 50 + # # 20% of max memory usage spike expected + # spike_limit_percentage: 20 + # queued_retry: + # num_workers: 4 + # queue_size: 100 + # retry_on_failure: true + resourcedetection: + # Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels. + detectors: [env, system] # include ec2 for AWS, gcp for GCP and azure for Azure. 
+ timeout: 2s + signozspanmetrics/delta: + metrics_exporter: clickhousemetricswrite + metrics_flush_interval: 60s + latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] + dimensions_cache_size: 100000 + aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA + enable_exp_histogram: true + dimensions: + - name: service.namespace + default: default + - name: deployment.environment + default: default + # This is added to ensure the uniqueness of the timeseries + # Otherwise, identical timeseries produced by multiple replicas of + # collectors result in incorrect APM metrics + - name: signoz.collector.id + - name: service.version + - name: browser.platform + - name: browser.mobile + - name: k8s.cluster.name + - name: k8s.node.name + - name: k8s.namespace.name + - name: host.name + - name: host.type + - name: container.name + +extensions: + health_check: + endpoint: 0.0.0.0:13133 + zpages: + endpoint: 0.0.0.0:55679 + pprof: + endpoint: 0.0.0.0:1777 + +exporters: + clickhousetraces: + datasource: tcp://clickhouse:9000/signoz_traces + docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} + low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING} + clickhousemetricswrite: + endpoint: tcp://clickhouse:9000/signoz_metrics + resource_to_telemetry_conversion: + enabled: true + clickhousemetricswrite/prometheus: + endpoint: tcp://clickhouse:9000/signoz_metrics + clickhouselogsexporter: + dsn: tcp://clickhouse:9000/signoz_logs + docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER} + timeout: 10s + # logging: {} + +service: + telemetry: + metrics: + address: 0.0.0.0:8888 + extensions: + - health_check + - zpages + - pprof + pipelines: + traces: + receivers: [jaeger, otlp] + processors: [signozspanmetrics/cumulative, signozspanmetrics/delta, batch] + exporters: [clickhousetraces] + metrics: + receivers: [otlp] + processors: [batch] + exporters: [clickhousemetricswrite] + metrics/generic: 
+ receivers: [hostmetrics] + processors: [resourcedetection, batch] + exporters: [clickhousemetricswrite] + metrics/prometheus: + receivers: [prometheus] + processors: [batch] + exporters: [clickhousemetricswrite/prometheus] + logs: + receivers: [otlp, tcplog/docker, httplogreceiver/json, tcplog] + processors: [batch] + exporters: [clickhouselogsexporter] diff --git a/signoz/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml b/signoz/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml new file mode 100644 index 0000000..e408b55 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml @@ -0,0 +1 @@ +server_endpoint: ws://query-service:4320/v1/opamp diff --git a/signoz/deploy/docker/clickhouse-setup/prometheus.yml b/signoz/deploy/docker/clickhouse-setup/prometheus.yml new file mode 100644 index 0000000..d7c5289 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/prometheus.yml @@ -0,0 +1,25 @@ +# my global config +global: + scrape_interval: 5s # Set the scrape interval to every 15 seconds. Default is every 1 minute. + evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute. + # scrape_timeout is set to the global default (10s). + +# Alertmanager configuration +alerting: + alertmanagers: + - static_configs: + - targets: + - alertmanager:9093 + +# Load rules once and periodically evaluate them according to the global 'evaluation_interval'. +rule_files: + # - "first_rules.yml" + # - "second_rules.yml" + - 'alerts.yml' + +# A scrape configuration containing exactly one endpoint to scrape: +# Here it's Prometheus itself. 
+scrape_configs: [] + +remote_read: + - url: tcp://clickhouse:9000/signoz_metrics diff --git a/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile b/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile new file mode 100755 index 0000000..3b77a7b Binary files /dev/null and b/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile differ diff --git a/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile.go b/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile.go new file mode 100644 index 0000000..9540a77 --- /dev/null +++ b/signoz/deploy/docker/clickhouse-setup/user_scripts/histogramQuantile.go @@ -0,0 +1,237 @@ +package main + +import ( + "bufio" + "fmt" + "math" + "os" + "sort" + "strconv" + "strings" +) + +// NOTE: executable must be built with target OS and architecture set to linux/amd64 +// env GOOS=linux GOARCH=amd64 go build -o histogramQuantile histogramQuantile.go + +// The following code is adapted from the following source: +// https://github.com/prometheus/prometheus/blob/main/promql/quantile.go + +type bucket struct { + upperBound float64 + count float64 +} + +// buckets implements sort.Interface. +type buckets []bucket + +func (b buckets) Len() int { return len(b) } +func (b buckets) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b buckets) Less(i, j int) bool { return b[i].upperBound < b[j].upperBound } + +// bucketQuantile calculates the quantile 'q' based on the given buckets. The +// buckets will be sorted by upperBound by this function (i.e. no sorting +// needed before calling this function). The quantile value is interpolated +// assuming a linear distribution within a bucket. However, if the quantile +// falls into the highest bucket, the upper bound of the 2nd highest bucket is +// returned. A natural lower bound of 0 is assumed if the upper bound of the +// lowest bucket is greater 0. 
In that case, interpolation in the lowest bucket
+// happens linearly between 0 and the upper bound of the lowest bucket.
+// However, if the lowest bucket has an upper bound less or equal 0, this upper
+// bound is returned if the quantile falls into the lowest bucket.
+//
+// There are a number of special cases (once we have a way to report errors
+// happening during evaluations of AST functions, we should report those
+// explicitly):
+//
+// If 'buckets' has 0 observations, NaN is returned.
+//
+// If 'buckets' has fewer than 2 elements, NaN is returned.
+//
+// If the highest bucket is not +Inf, NaN is returned.
+//
+// If q==NaN, NaN is returned.
+//
+// If q<0, -Inf is returned.
+//
+// If q>1, +Inf is returned.
+func bucketQuantile(q float64, buckets buckets) float64 {
+	if math.IsNaN(q) {
+		return math.NaN()
+	}
+	if q < 0 {
+		return math.Inf(-1)
+	}
+	if q > 1 {
+		return math.Inf(+1)
+	}
+	// Order by upperBound (the buckets type implements sort.Interface on upperBound).
+	sort.Sort(buckets)
+	// The highest bucket must be +Inf, i.e. hold the total observation count.
+	if !math.IsInf(buckets[len(buckets)-1].upperBound, +1) {
+		return math.NaN()
+	}
+
+	buckets = coalesceBuckets(buckets)
+	ensureMonotonic(buckets)
+
+	if len(buckets) < 2 {
+		return math.NaN()
+	}
+	observations := buckets[len(buckets)-1].count
+	if observations == 0 {
+		return math.NaN()
+	}
+	// Binary search for the first bucket whose cumulative count reaches rank.
+	rank := q * observations
+	b := sort.Search(len(buckets)-1, func(i int) bool { return buckets[i].count >= rank })
+
+	// Quantile falls into the +Inf bucket: return the 2nd-highest upper bound
+	// (see the special-case list in the function comment above).
+	if b == len(buckets)-1 {
+		return buckets[len(buckets)-2].upperBound
+	}
+	// Lowest bucket with a non-positive upper bound: no interpolation from 0.
+	if b == 0 && buckets[0].upperBound <= 0 {
+		return buckets[0].upperBound
+	}
+	var (
+		bucketStart float64
+		bucketEnd   = buckets[b].upperBound
+		count       = buckets[b].count
+	)
+	if b > 0 {
+		bucketStart = buckets[b-1].upperBound
+		count -= buckets[b-1].count
+		rank -= buckets[b-1].count
+	}
+	// Linear interpolation within the located bucket.
+	return bucketStart + (bucketEnd-bucketStart)*(rank/count)
+}
+
+// coalesceBuckets merges buckets with the same upper bound.
+//
+// The input buckets must be sorted.
+func coalesceBuckets(buckets buckets) buckets {
+	// In-place compaction: i is the write index into the shared backing array,
+	// trailing the read cursor. Assumes len(buckets) >= 1 (indexes buckets[0]).
+	last := buckets[0]
+	i := 0
+	for _, b := range buckets[1:] {
+		if b.upperBound == last.upperBound {
+			// Same bound as the pending bucket: fold the counts together.
+			last.count += b.count
+		} else {
+			buckets[i] = last
+			last = b
+			i++
+		}
+	}
+	// Flush the final pending bucket and trim to the compacted length.
+	buckets[i] = last
+	return buckets[:i+1]
+}
+
+// The assumption that bucket counts increase monotonically with increasing
+// upperBound may be violated during:
+//
+//   * Recording rule evaluation of histogram_quantile, especially when rate()
+//      has been applied to the underlying bucket timeseries.
+//   * Evaluation of histogram_quantile computed over federated bucket
+//      timeseries, especially when rate() has been applied.
+//
+// This is because scraped data is not made available to rule evaluation or
+// federation atomically, so some buckets are computed with data from the
+// most recent scrapes, but the other buckets are missing data from the most
+// recent scrape.
+//
+// Monotonicity is usually guaranteed because if a bucket with upper bound
+// u1 has count c1, then any bucket with a higher upper bound u > u1 must
+// have counted all c1 observations and perhaps more, so that c  >= c1.
+//
+// Randomly interspersed partial sampling breaks that guarantee, and rate()
+// exacerbates it. Specifically, suppose bucket le=1000 has a count of 10 from
+// 4 samples but the bucket with le=2000 has a count of 7 from 3 samples. The
+// monotonicity is broken. It is exacerbated by rate() because under normal
+// operation, cumulative counting of buckets will cause the bucket counts to
+// diverge such that small differences from missing samples are not a problem.
+// rate() removes this divergence.)
+//
+// bucketQuantile depends on that monotonicity to do a binary search for the
+// bucket with the φ-quantile count, so breaking the monotonicity
+// guarantee causes bucketQuantile() to return undefined (nonsense) results.
+//
+// As a somewhat hacky solution until ingestion is atomic per scrape, we
+// calculate the "envelope" of the histogram buckets, essentially removing
+// any decreases in the count between successive buckets.
+
+// ensureMonotonic clamps each count up to the running maximum so the
+// cumulative counts are non-decreasing. Assumes len(buckets) >= 1.
+func ensureMonotonic(buckets buckets) {
+	max := buckets[0].count
+	for i := 1; i < len(buckets); i++ {
+		switch {
+		case buckets[i].count > max:
+			max = buckets[i].count
+		case buckets[i].count < max:
+			buckets[i].count = max
+		}
+	}
+}
+
+// End of copied code.
+
+// readLines reads all of stdin and returns one element per trimmed, non-empty
+// line. The isPrefix flag handles lines longer than the reader's buffer. Note
+// that a trailing fragment with no final newline is appended un-trimmed.
+func readLines() []string {
+	r := bufio.NewReader(os.Stdin)
+	bytes := []byte{}
+	lines := []string{}
+	for {
+		line, isPrefix, err := r.ReadLine()
+		if err != nil {
+			// io.EOF or a read error both terminate the loop.
+			break
+		}
+		bytes = append(bytes, line...)
+		if !isPrefix {
+			str := strings.TrimSpace(string(bytes))
+			if len(str) > 0 {
+				lines = append(lines, str)
+				bytes = []byte{}
+			}
+		}
+	}
+	if len(bytes) > 0 {
+		lines = append(lines, string(bytes))
+	}
+	return lines
+}
+
+// Entry point. Presumably invoked as a ClickHouse executable UDF (the binary
+// lives under user_scripts/) — TODO confirm against the ClickHouse config.
+// Each input line carries two quoted bracketed lists (bounds, counts) and a
+// quantile.
+func main() {
+	lines := readLines()
+	for _, text := range lines {
+		// Example input
+		// "[1, 2, 4, 8, 16]", "[1, 5, 8, 10, 14]", 0.9"
+		// bounds - counts - quantile
+		// NOTE(review): the parsing below assumes exactly this framing; a
+		// malformed line panics on the parts[...] slice expressions, and
+		// unparseable numbers are silently skipped.
+		parts := strings.Split(text, "\",")
+
+		var bucketNumbers []float64
+		// Strip the ends with square brackets
+		text = parts[0][2 : len(parts[0])-1]
+		// Parse the bucket bounds
+		for _, num := range strings.Split(text, ",") {
+			num = strings.TrimSpace(num)
+			number, err := strconv.ParseFloat(num, 64)
+			if err == nil {
+				bucketNumbers = append(bucketNumbers, number)
+			}
+		}
+
+		var bucketCounts []float64
+		// Strip the ends with square brackets
+		text = parts[1][2 : len(parts[1])-1]
+		// Parse the bucket counts
+		for _, num := range strings.Split(text, ",") {
+			num = strings.TrimSpace(num)
+			number, err := strconv.ParseFloat(num, 64)
+			if err == nil {
+				bucketCounts = append(bucketCounts, number)
+			}
+		}
+
+		// Parse the quantile
+		q, err := strconv.ParseFloat(parts[2], 64)
+		var b buckets
+
+		if err == nil {
+			for i := 0; i < len(bucketNumbers); i++ {
+				b = append(b, bucket{upperBound:
bucketNumbers[i], count: bucketCounts[i]}) + } + } + fmt.Println(bucketQuantile(q, b)) + } +} diff --git a/signoz/deploy/docker/common/locust-scripts/locustfile.py b/signoz/deploy/docker/common/locust-scripts/locustfile.py new file mode 100644 index 0000000..0b51820 --- /dev/null +++ b/signoz/deploy/docker/common/locust-scripts/locustfile.py @@ -0,0 +1,16 @@ +from locust import HttpUser, task, between +class UserTasks(HttpUser): + wait_time = between(5, 15) + + @task + def rachel(self): + self.client.get("/dispatch?customer=123&nonse=0.6308392664170006") + @task + def trom(self): + self.client.get("/dispatch?customer=392&nonse=0.015296363321630757") + @task + def japanese(self): + self.client.get("/dispatch?customer=731&nonse=0.8022286220408668") + @task + def coffee(self): + self.client.get("/dispatch?customer=567&nonse=0.0022220379420636593") diff --git a/signoz/deploy/docker/common/nginx-config.conf b/signoz/deploy/docker/common/nginx-config.conf new file mode 100644 index 0000000..f7943e2 --- /dev/null +++ b/signoz/deploy/docker/common/nginx-config.conf @@ -0,0 +1,51 @@ +server { + listen 3301; + server_name _; + + gzip on; + gzip_static on; + gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + gzip_proxied any; + gzip_vary on; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + + # to handle uri issue 414 from nginx + client_max_body_size 24M; + large_client_header_buffers 8 128k; + + location / { + if ( $uri = '/index.html' ) { + add_header Cache-Control no-store always; + } + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html; + } + + location ~ ^/api/(v1|v3)/logs/(tail|livetail){ + proxy_pass http://query-service:8080; + proxy_http_version 1.1; + + # connection will be closed if no data is read for 600s between successive read operations + proxy_read_timeout 600s; + + # dont buffer the data send it directly to 
client. + proxy_buffering off; + proxy_cache off; + } + + location /api { + proxy_pass http://query-service:8080/api; + # connection will be closed if no data is read for 600s between successive read operations + proxy_read_timeout 600s; + } + + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +} \ No newline at end of file diff --git a/signoz/deploy/install.sh b/signoz/deploy/install.sh new file mode 100755 index 0000000..85c63c2 --- /dev/null +++ b/signoz/deploy/install.sh @@ -0,0 +1,559 @@ +#!/bin/bash + +set -o errexit + +# Regular Colors +Black='\033[0;30m' # Black +Red='\[\e[0;31m\]' # Red +Green='\033[0;32m' # Green +Yellow='\033[0;33m' # Yellow +Blue='\033[0;34m' # Blue +Purple='\033[0;35m' # Purple +Cyan='\033[0;36m' # Cyan +White='\033[0;37m' # White +NC='\033[0m' # No Color + +is_command_present() { + type "$1" >/dev/null 2>&1 +} + +# Check whether 'wget' command exists. +has_wget() { + has_cmd wget +} + +# Check whether 'curl' command exists. +has_curl() { + has_cmd curl +} + +# Check whether the given command exists. 
+has_cmd() { + command -v "$1" > /dev/null 2>&1 +} + +is_mac() { + [[ $OSTYPE == darwin* ]] +} + +is_arm64(){ + [[ `uname -m` == 'arm64' || `uname -m` == 'aarch64' ]] +} + +check_os() { + if is_mac; then + package_manager="brew" + desired_os=1 + os="Mac" + return + fi + + if is_arm64; then + arch="arm64" + arch_official="aarch64" + else + arch="amd64" + arch_official="x86_64" + fi + + platform=$(uname -s | tr '[:upper:]' '[:lower:]') + + os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')" + + case "$os_name" in + Ubuntu*|Pop!_OS) + desired_os=1 + os="ubuntu" + package_manager="apt-get" + ;; + Amazon\ Linux*) + desired_os=1 + os="amazon linux" + package_manager="yum" + ;; + Debian*) + desired_os=1 + os="debian" + package_manager="apt-get" + ;; + Linux\ Mint*) + desired_os=1 + os="linux mint" + package_manager="apt-get" + ;; + Red\ Hat*) + desired_os=1 + os="red hat" + package_manager="yum" + ;; + CentOS*) + desired_os=1 + os="centos" + package_manager="yum" + ;; + Rocky*) + desired_os=1 + os="centos" + package_manager="yum" + ;; + SLES*) + desired_os=1 + os="sles" + package_manager="zypper" + ;; + openSUSE*) + desired_os=1 + os="opensuse" + package_manager="zypper" + ;; + *) + desired_os=0 + os="Not Found: $os_name" + esac +} + + +# This function checks if the relevant ports required by SigNoz are available or not +# The script should error out in case they aren't available +check_ports_occupied() { + local port_check_output + local ports_pattern="3301|4317" + + if is_mac; then + port_check_output="$(netstat -anp tcp | awk '$6 == "LISTEN" && $4 ~ /^.*\.('"$ports_pattern"')$/')" + elif is_command_present ss; then + # The `ss` command seems to be a better/faster version of `netstat`, but is not available on all Linux + # distributions by default. Other distributions have `ss` but no `netstat`. So, we try for `ss` first, then + # fallback to `netstat`. 
+ port_check_output="$(ss --all --numeric --tcp | awk '$1 == "LISTEN" && $4 ~ /^.*:('"$ports_pattern"')$/')" + elif is_command_present netstat; then + port_check_output="$(netstat --all --numeric --tcp | awk '$6 == "LISTEN" && $4 ~ /^.*:('"$ports_pattern"')$/')" + fi + + if [[ -n $port_check_output ]]; then + send_event "port_not_available" + + echo "+++++++++++ ERROR ++++++++++++++++++++++" + echo "SigNoz requires ports 3301 & 4317 to be open. Please shut down any other service(s) that may be running on these ports." + echo "You can run SigNoz on another port following this guide https://signoz.io/docs/install/troubleshooting/" + echo "++++++++++++++++++++++++++++++++++++++++" + echo "" + exit 1 + fi +} + +install_docker() { + echo "++++++++++++++++++++++++" + echo "Setting up docker repos" + + + if [[ $package_manager == apt-get ]]; then + apt_cmd="$sudo_cmd apt-get --yes --quiet" + $apt_cmd update + $apt_cmd install software-properties-common gnupg-agent + curl -fsSL "https://download.docker.com/linux/$os/gpg" | $sudo_cmd apt-key add - + $sudo_cmd add-apt-repository \ + "deb [arch=$arch] https://download.docker.com/linux/$os $(lsb_release -cs) stable" + $apt_cmd update + echo "Installing docker" + $apt_cmd install docker-ce docker-ce-cli containerd.io + elif [[ $package_manager == zypper ]]; then + zypper_cmd="$sudo_cmd zypper --quiet --no-gpg-checks --non-interactive" + echo "Installing docker" + if [[ $os == sles ]]; then + os_sp="$(cat /etc/*-release | awk -F= '$1 == "VERSION_ID" { gsub(/"/, ""); print $2; exit }')" + os_arch="$(uname -i)" + SUSEConnect -p sle-module-containers/$os_sp/$os_arch -r '' + fi + $zypper_cmd install docker docker-runc containerd + $sudo_cmd systemctl enable docker.service + elif [[ $package_manager == yum && $os == 'amazon linux' ]]; then + echo + echo "Amazon Linux detected ... 
" + echo + # yum install docker + # service docker start + $sudo_cmd yum install -y amazon-linux-extras + $sudo_cmd amazon-linux-extras enable docker + $sudo_cmd yum install -y docker + else + + yum_cmd="$sudo_cmd yum --assumeyes --quiet" + $yum_cmd install yum-utils + $sudo_cmd yum-config-manager --add-repo https://download.docker.com/linux/$os/docker-ce.repo + echo "Installing docker" + $yum_cmd install docker-ce docker-ce-cli containerd.io + + fi + +} + +compose_version () { + local compose_version + compose_version="$(curl -s https://api.github.com/repos/docker/compose/releases/latest | grep 'tag_name' | cut -d\" -f4)" + echo "${compose_version:-v2.18.1}" +} + +install_docker_compose() { + if [[ $package_manager == "apt-get" || $package_manager == "zypper" || $package_manager == "yum" ]]; then + if [[ ! -f /usr/bin/docker-compose ]];then + echo "++++++++++++++++++++++++" + echo "Installing docker-compose" + compose_url="https://github.com/docker/compose/releases/download/$(compose_version)/docker-compose-$platform-$arch_official" + echo "Downloading docker-compose from $compose_url" + $sudo_cmd curl -L "$compose_url" -o /usr/local/bin/docker-compose + $sudo_cmd chmod +x /usr/local/bin/docker-compose + $sudo_cmd ln -s /usr/local/bin/docker-compose /usr/bin/docker-compose + echo "docker-compose installed!" + echo "" + fi + else + send_event "docker_compose_not_found" + + echo "+++++++++++ IMPORTANT READ ++++++++++++++++++++++" + echo "docker-compose not found! Please install docker-compose first and then continue with this installation." + echo "Refer https://docs.docker.com/compose/install/ for installing docker-compose." + echo "+++++++++++++++++++++++++++++++++++++++++++++++++" + exit 1 + fi +} + +start_docker() { + echo -e "🐳 Starting Docker ...\n" + if [[ $os == "Mac" ]]; then + open --background -a Docker && while ! docker system info > /dev/null 2>&1; do sleep 1; done + else + if ! 
$sudo_cmd systemctl is-active docker.service > /dev/null; then + echo "Starting docker service" + $sudo_cmd systemctl start docker.service + fi + # if [[ -z $sudo_cmd ]]; then + # docker ps > /dev/null && true + # if [[ $? -ne 0 ]]; then + # request_sudo + # fi + # fi + if [[ -z $sudo_cmd ]]; then + if ! docker ps > /dev/null && true; then + request_sudo + fi + fi + fi +} + +wait_for_containers_start() { + local timeout=$1 + + # The while loop is important because for-loops don't work for dynamic values + while [[ $timeout -gt 0 ]]; do + status_code="$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:3301/api/v1/health?live=1" || true)" + if [[ status_code -eq 200 ]]; then + break + else + echo -ne "Waiting for all containers to start. This check will timeout in $timeout seconds ...\r\c" + fi + ((timeout--)) + sleep 1 + done + + echo "" +} + +bye() { # Prints a friendly good bye message and exits the script. + if [[ "$?" -ne 0 ]]; then + set +o errexit + + echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:" + echo "" + echo -e "$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml ps -a" + + echo "Please read our troubleshooting guide https://signoz.io/docs/install/troubleshooting/" + echo "or reach us for support in #help channel in our Slack Community https://signoz.io/slack" + echo "++++++++++++++++++++++++++++++++++++++++" + + if [[ $email == "" ]]; then + echo -e "\n📨 Please share your email to receive support with the installation" + read -rp 'Email: ' email + + while [[ $email == "" ]] + do + read -rp 'Email: ' email + done + fi + + send_event "installation_support" + + + echo "" + echo -e "\nWe will reach out to you at the email provided shortly, Exiting for now. Bye! 👋 \n" + exit 0 + fi +} + +request_sudo() { + if hash sudo 2>/dev/null; then + echo -e "\n\n🙇 We will need sudo access to complete the installation." 
+ if (( $EUID != 0 )); then + sudo_cmd="sudo" + echo -e "Please enter your sudo password, if prompted." + # $sudo_cmd -l | grep -e "NOPASSWD: ALL" > /dev/null + # if [[ $? -ne 0 ]] && ! $sudo_cmd -v; then + # echo "Need sudo privileges to proceed with the installation." + # exit 1; + # fi + if ! $sudo_cmd -l | grep -e "NOPASSWD: ALL" > /dev/null && ! $sudo_cmd -v; then + echo "Need sudo privileges to proceed with the installation." + exit 1; + fi + + echo -e "Got it! Thanks!! 🙏\n" + echo -e "Okay! We will bring up the SigNoz cluster from here 🚀\n" + fi + fi +} + +echo "" +echo -e "👋 Thank you for trying out SigNoz! " +echo "" + +sudo_cmd="" + +# Check sudo permissions +if (( $EUID != 0 )); then + echo "🟡 Running installer with non-sudo permissions." + echo " In case of any failure or prompt, please consider running the script with sudo privileges." + echo "" +else + sudo_cmd="sudo" +fi + +# Checking OS and assigning package manager +desired_os=0 +os="" +email="" +echo -e "🌏 Detecting your OS ...\n" +check_os + +# Obtain unique installation id +# sysinfo="$(uname -a)" +# if [[ $? -ne 0 ]]; then +# uuid="$(uuidgen)" +# uuid="${uuid:-$(cat /proc/sys/kernel/random/uuid)}" +# sysinfo="${uuid:-$(cat /proc/sys/kernel/random/uuid)}" +# fi +if ! 
sysinfo="$(uname -a)"; then
+    uuid="$(uuidgen)"
+    uuid="${uuid:-$(cat /proc/sys/kernel/random/uuid)}"
+    sysinfo="${uuid:-$(cat /proc/sys/kernel/random/uuid)}"
+fi
+
+digest_cmd=""
+if hash shasum 2>/dev/null; then
+    digest_cmd="shasum -a 256"
+elif hash sha256sum 2>/dev/null; then
+    digest_cmd="sha256sum"
+elif hash openssl 2>/dev/null; then
+    digest_cmd="openssl dgst -sha256"
+fi
+
+if [[ -z $digest_cmd ]]; then
+    SIGNOZ_INSTALLATION_ID="$sysinfo"
+else
+    SIGNOZ_INSTALLATION_ID=$(echo "$sysinfo" | $digest_cmd | grep -E -o '[a-zA-Z0-9]{64}')
+fi
+
+# echo ""
+
+# echo -e "👉 ${RED}Two ways to go forward\n"
+# echo -e "${RED}1) ClickHouse as database (default)\n"
+# read -p "⚙️ Enter your preference (1/2):" choice_setup
+
+# while [[ $choice_setup != "1" && $choice_setup != "2" && $choice_setup != "" ]]
+# do
+#     # echo $choice_setup
+#     echo -e "\n❌ ${CYAN}Please enter either 1 or 2"
+#     read -p "⚙️ Enter your preference (1/2): " choice_setup
+#     # echo $choice_setup
+# done
+
+# if [[ $choice_setup == "1" || $choice_setup == "" ]];then
+#     setup_type='clickhouse'
+# fi
+
+setup_type='clickhouse'
+
+# echo -e "\n✅ ${CYAN}You have chosen: ${setup_type} setup\n"
+
+# Run bye if failure happens
+trap bye EXIT
+
+URL="https://api.segment.io/v1/track"
+HEADER_1="Content-Type: application/json"
+HEADER_2="Authorization: Basic OWtScko3b1BDR1BFSkxGNlFqTVBMdDVibGpGaFJRQnI="
+
+# Reports an anonymized analytics event to Segment. $1 selects the event name
+# and, for failure events, the error detail folded into the JSON payload.
+send_event() {
+    error=""
+
+    case "$1" in
+        'install_started')
+            event="Installation Started"
+            ;;
+        'os_not_supported')
+            event="Installation Error"
+            error="OS Not Supported"
+            ;;
+        'docker_not_installed')
+            event="Installation Error"
+            error="Docker not installed"
+            ;;
+        'docker_compose_not_found')
+            event="Installation Error"
+            # fix: this case previously assigned event= twice, which overwrote
+            # the event name and left the error detail out of the payload
+            error="Docker Compose not found"
+            ;;
+        'port_not_available')
+            event="Installation Error"
+            error="port not available"
+            ;;
+        'installation_error_checks')
+            event="Installation Error - Checks"
+            error="Containers not started"
+            others='"data": "some_checks",'
+            ;;
+        
'installation_support') + event="Installation Support" + others='"email": "'"$email"'",' + ;; + 'installation_success') + event="Installation Success" + ;; + 'identify_successful_installation') + event="Identify Successful Installation" + others='"email": "'"$email"'",' + ;; + *) + print_error "unknown event type: $1" + exit 1 + ;; + esac + + if [[ "$error" != "" ]]; then + error='"error": "'"$error"'", ' + fi + + DATA='{ "anonymousId": "'"$SIGNOZ_INSTALLATION_ID"'", "event": "'"$event"'", "properties": { "os": "'"$os"'", '"$error $others"' "setup_type": "'"$setup_type"'" } }' + + if has_curl; then + curl -sfL -d "$DATA" --header "$HEADER_1" --header "$HEADER_2" "$URL" > /dev/null 2>&1 + elif has_wget; then + wget -q --post-data="$DATA" --header "$HEADER_1" --header "$HEADER_2" "$URL" > /dev/null 2>&1 + fi +} + +send_event "install_started" + +if [[ $desired_os -eq 0 ]]; then + send_event "os_not_supported" +fi + +# check_ports_occupied + +# Check is Docker daemon is installed and available. If not, the install & start Docker for Linux machines. We cannot automatically install Docker Desktop on Mac OS +if ! is_command_present docker; then + + if [[ $package_manager == "apt-get" || $package_manager == "zypper" || $package_manager == "yum" ]]; then + request_sudo + install_docker + # enable docker without sudo from next reboot + sudo usermod -aG docker "${USER}" + elif is_mac; then + echo "" + echo "+++++++++++ IMPORTANT READ ++++++++++++++++++++++" + echo "Docker Desktop must be installed manually on Mac OS to proceed. Docker can only be installed automatically on Ubuntu / openSUSE / SLES / Redhat / Cent OS" + echo "https://docs.docker.com/docker-for-mac/install/" + echo "++++++++++++++++++++++++++++++++++++++++++++++++" + + send_event "docker_not_installed" + exit 1 + else + echo "" + echo "+++++++++++ IMPORTANT READ ++++++++++++++++++++++" + echo "Docker must be installed manually on your machine to proceed. 
Docker can only be installed automatically on Ubuntu / openSUSE / SLES / Redhat / Cent OS" + echo "https://docs.docker.com/get-docker/" + echo "++++++++++++++++++++++++++++++++++++++++++++++++" + + send_event "docker_not_installed" + exit 1 + fi +fi + +# Install docker-compose +if ! is_command_present docker-compose; then + request_sudo + install_docker_compose +fi + +start_docker + +# $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up -d --remove-orphans || true + + +echo "" +echo -e "\n🟡 Pulling the latest container images for SigNoz.\n" +$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml pull + +echo "" +echo "🟡 Starting the SigNoz containers. It may take a few minutes ..." +echo +# The docker-compose command does some nasty stuff for the `--detach` functionality. So we add a `|| true` so that the +# script doesn't exit because this command looks like it failed to do it's thing. +$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up --detach --remove-orphans || true + +wait_for_containers_start 60 +echo "" + +if [[ $status_code -ne 200 ]]; then + echo "+++++++++++ ERROR ++++++++++++++++++++++" + echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:" + echo "" + + echo -e "$sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml ps -a" + + echo "Please read our troubleshooting guide https://signoz.io/docs/install/troubleshooting/" + echo "or reach us on SigNoz for support https://signoz.io/slack" + echo "++++++++++++++++++++++++++++++++++++++++" + + send_event "installation_error_checks" + exit 1 + +else + send_event "installation_success" + + echo "++++++++++++++++++ SUCCESS ++++++++++++++++++++++" + echo "" + echo "🟢 Your installation is complete!" 
+ echo "" + echo -e "🟢 Your frontend is running on http://localhost:3301" + echo "" + echo "ℹ️ By default, retention period is set to 15 days for logs and traces, and 30 days for metrics." + echo -e "To change this, navigate to the General tab on the Settings page of SigNoz UI. For more details, refer to https://signoz.io/docs/userguide/retention-period \n" + + echo "ℹ️ To bring down SigNoz and clean volumes : $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml down -v" + + echo "" + echo "+++++++++++++++++++++++++++++++++++++++++++++++++" + echo "" + echo "👉 Need help in Getting Started?" + echo -e "Join us on Slack https://signoz.io/slack" + echo "" + echo -e "\n📨 Please share your email to receive support & updates about SigNoz!" + read -rp 'Email: ' email + + while [[ $email == "" ]] + do + read -rp 'Email: ' email + done + + send_event "identify_successful_installation" +fi + +echo -e "\n🙏 Thank you!\n" diff --git a/signoz/e2e/package.json b/signoz/e2e/package.json new file mode 100644 index 0000000..b0d1f6f --- /dev/null +++ b/signoz/e2e/package.json @@ -0,0 +1,14 @@ +{ + "name": "e2e", + "version": "1.0.0", + "main": "index.js", + "license": "MIT", + "devDependencies": { + "@playwright/test": "^1.22.0", + "@types/node": "^20.9.2" + }, + "scripts": {}, + "dependencies": { + "dotenv": "8.2.0" + } +} diff --git a/signoz/e2e/playwright.config.ts b/signoz/e2e/playwright.config.ts new file mode 100644 index 0000000..b1a0f30 --- /dev/null +++ b/signoz/e2e/playwright.config.ts @@ -0,0 +1,46 @@ +import { defineConfig, devices } from "@playwright/test"; +import dotenv from "dotenv"; + +dotenv.config(); + +export default defineConfig({ + testDir: "./tests", + + fullyParallel: true, + + forbidOnly: !!process.env.CI, + + name: "Signoz E2E", + + retries: process.env.CI ? 2 : 0, + + reporter: process.env.CI ? 
"github" : "list", + + preserveOutput: "always", + + updateSnapshots: "all", + + quiet: false, + + testMatch: ["**/*.spec.ts"], + + use: { + trace: "on-first-retry", + + baseURL: + process.env.PLAYWRIGHT_TEST_BASE_URL || "https://stagingapp.signoz.io/", + }, + + projects: [ + { name: "setup", testMatch: /.*\.setup\.ts/ }, + { + name: "chromium", + use: { + ...devices["Desktop Chrome"], + // Use prepared auth state. + storageState: ".auth/user.json", + }, + dependencies: ["setup"], + }, + ], +}); diff --git a/signoz/e2e/tests/auth.setup.ts b/signoz/e2e/tests/auth.setup.ts new file mode 100644 index 0000000..9ca8173 --- /dev/null +++ b/signoz/e2e/tests/auth.setup.ts @@ -0,0 +1,37 @@ +import { test, expect } from "@playwright/test"; +import ROUTES from "../../frontend/src/constants/routes"; +import dotenv from "dotenv"; + +dotenv.config(); + +const authFile = ".auth/user.json"; + +test("E2E Login Test", async ({ page }) => { + await Promise.all([page.goto("/"), page.waitForRequest("**/version")]); + + const signup = "Monitor your applications. Find what is causing issues."; + + const el = await page.locator(`text=${signup}`); + + expect(el).toBeVisible(); + + await page + .locator("id=loginEmail") + .type( + process.env.PLAYWRIGHT_USERNAME ? process.env.PLAYWRIGHT_USERNAME : "" + ); + + await page.getByText("Next").click(); + + await page + .locator('input[id="currentPassword"]') + .fill( + process.env.PLAYWRIGHT_PASSWORD ? 
process.env.PLAYWRIGHT_PASSWORD : "" + ); + + await page.locator('button[data-attr="signup"]').click(); + + await expect(page).toHaveURL(ROUTES.APPLICATION); + + await page.context().storageState({ path: authFile }); +}); diff --git a/signoz/e2e/tests/contants.ts b/signoz/e2e/tests/contants.ts new file mode 100644 index 0000000..e44f761 --- /dev/null +++ b/signoz/e2e/tests/contants.ts @@ -0,0 +1,10 @@ +export const SERVICE_TABLE_HEADERS = { + APPLICATION: "Applicaton", + P99LATENCY: "P99 latency (in ms)", + ERROR_RATE: "Error Rate (% of total)", + OPS_PER_SECOND: "Operations Per Second", +}; + +export const DATA_TEST_IDS = { + NEW_DASHBOARD_BTN: "create-new-dashboard", +}; diff --git a/signoz/e2e/tests/navigation.spec.ts b/signoz/e2e/tests/navigation.spec.ts new file mode 100644 index 0000000..6f30b2f --- /dev/null +++ b/signoz/e2e/tests/navigation.spec.ts @@ -0,0 +1,40 @@ +import { test, expect } from "@playwright/test"; +import ROUTES from "../../frontend/src/constants/routes"; +import { DATA_TEST_IDS, SERVICE_TABLE_HEADERS } from "./contants"; + +test("Basic Navigation Check across different resources", async ({ page }) => { + // route to services page and check if the page renders fine with BE contract + await Promise.all([ + page.goto(ROUTES.APPLICATION), + page.waitForRequest("**/v1/services"), + ]); + + const p99Latency = page.locator( + `th:has-text("${SERVICE_TABLE_HEADERS.P99LATENCY}")` + ); + + await expect(p99Latency).toBeVisible(); + + // route to the new trace explorer page and check if the page renders fine + await page.goto(ROUTES.TRACES_EXPLORER); + + await page.waitForLoadState("networkidle"); + + const listViewTable = await page + .locator('div[role="presentation"]') + .isVisible(); + + expect(listViewTable).toBeTruthy(); + + // route to the dashboards page and check if the page renders fine + await Promise.all([ + page.goto(ROUTES.ALL_DASHBOARD), + page.waitForRequest("**/v1/dashboards"), + ]); + + const newDashboardBtn = await page + 
.locator(`data-testid=${DATA_TEST_IDS.NEW_DASHBOARD_BTN}`) + .isVisible(); + + expect(newDashboardBtn).toBeTruthy(); +}); diff --git a/signoz/e2e/yarn.lock b/signoz/e2e/yarn.lock new file mode 100644 index 0000000..1fd28ee --- /dev/null +++ b/signoz/e2e/yarn.lock @@ -0,0 +1,46 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@playwright/test@^1.22.0": + version "1.40.0" + resolved "https://registry.yarnpkg.com/@playwright/test/-/test-1.40.0.tgz#d06c506977dd7863aa16e07f2136351ecc1be6ed" + integrity sha512-PdW+kn4eV99iP5gxWNSDQCbhMaDVej+RXL5xr6t04nbKLCBwYtA046t7ofoczHOm8u6c+45hpDKQVZqtqwkeQg== + dependencies: + playwright "1.40.0" + +"@types/node@^20.9.2": + version "20.9.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.9.2.tgz#002815c8e87fe0c9369121c78b52e800fadc0ac6" + integrity sha512-WHZXKFCEyIUJzAwh3NyyTHYSR35SevJ6mZ1nWwJafKtiQbqRTIKSRcw3Ma3acqgsent3RRDqeVwpHntMk+9irg== + dependencies: + undici-types "~5.26.4" + +dotenv@8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" + integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== + +fsevents@2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +playwright-core@1.40.0: + version "1.40.0" + resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.40.0.tgz#82f61e5504cb3097803b6f8bbd98190dd34bdf14" + integrity sha512-fvKewVJpGeca8t0ipM56jkVSU6Eo0RmFvQ/MaCQNDYm+sdvKkMBBWTE1FdeMqIdumRaXXjZChWHvIzCGM/tA/Q== + +playwright@1.40.0: + version "1.40.0" + resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.40.0.tgz#2a1824b9fe5c4fe52ed53db9ea68003543a99df0" + integrity 
sha512-gyHAgQjiDf1m34Xpwzaqb76KgfzYrhK7iih+2IzcOCoZWr/8ZqmdBw+t0RU85ZmfJMgtgAiNtBQ/KS2325INXw== + dependencies: + playwright-core "1.40.0" + optionalDependencies: + fsevents "2.3.2" + +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== diff --git a/signoz/ee/LICENSE b/signoz/ee/LICENSE new file mode 100644 index 0000000..c024dbd --- /dev/null +++ b/signoz/ee/LICENSE @@ -0,0 +1,37 @@ + +The SigNoz Enterprise license (the "Enterprise License") +Copyright (c) 2020 - present SigNoz Inc. + +With regard to the SigNoz Software: + +This software and associated documentation files (the "Software") may only be +used in production, if you (and any entity that you represent) have agreed to, +and are in compliance with, the SigNoz Subscription Terms of Service, available +via email (hello@signoz.io) (the "Enterprise Terms"), or other +agreement governing the use of the Software, as agreed by you and SigNoz, +and otherwise have a valid SigNoz Enterprise license for the +correct number of user seats. Subject to the foregoing sentence, you are free to +modify this Software and publish patches to the Software. You agree that SigNoz +and/or its licensors (as applicable) retain all right, title and interest in and +to all such modifications and/or patches, and all such modifications and/or +patches may only be used, copied, modified, displayed, distributed, or otherwise +exploited with a valid SigNoz Enterprise license for the correct +number of user seats. Notwithstanding the foregoing, you may copy and modify +the Software for development and testing purposes, without requiring a +subscription. You agree that SigNoz and/or its licensors (as applicable) retain +all right, title and interest in and to all such modifications. 
You are not +granted any other rights beyond what is expressly stated herein. Subject to the +foregoing, it is forbidden to copy, merge, publish, distribute, sublicense, +and/or sell the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +For all third party components incorporated into the SigNoz Software, those +components are licensed under the original license provided by the owner of the +applicable component. \ No newline at end of file diff --git a/signoz/ee/query-service/.dockerignore b/signoz/ee/query-service/.dockerignore new file mode 100644 index 0000000..9521c50 --- /dev/null +++ b/signoz/ee/query-service/.dockerignore @@ -0,0 +1,4 @@ +.vscode +README.md +signoz.db +bin \ No newline at end of file diff --git a/signoz/ee/query-service/Dockerfile b/signoz/ee/query-service/Dockerfile new file mode 100644 index 0000000..55ed33a --- /dev/null +++ b/signoz/ee/query-service/Dockerfile @@ -0,0 +1,31 @@ +# use a minimal alpine image +FROM alpine:3.18.6 + +# Add Maintainer Info +LABEL maintainer="signoz" + +# define arguments that can be passed during build time +ARG TARGETOS TARGETARCH + +# add ca-certificates in case you need them +RUN apk update && apk add ca-certificates && rm -rf /var/cache/apk/* + +# set working directory +WORKDIR /root + +# copy the query-service binary +COPY ee/query-service/bin/query-service-${TARGETOS}-${TARGETARCH} /root/query-service + +# copy prometheus YAML config +COPY pkg/query-service/config/prometheus.yml /root/config/prometheus.yml +COPY pkg/query-service/templates /root/templates + +# Make query-service 
executable for non-root users +RUN chmod 755 /root /root/query-service + +# run the binary +ENTRYPOINT ["./query-service"] + +CMD ["-config", "/root/config/prometheus.yml"] + +EXPOSE 8080 diff --git a/signoz/ee/query-service/app/api/api.go b/signoz/ee/query-service/app/api/api.go new file mode 100644 index 0000000..66b462e --- /dev/null +++ b/signoz/ee/query-service/app/api/api.go @@ -0,0 +1,194 @@ +package api + +import ( + "net/http" + "net/http/httputil" + "time" + + "github.com/gorilla/mux" + "go.signoz.io/signoz/ee/query-service/dao" + "go.signoz.io/signoz/ee/query-service/integrations/gateway" + "go.signoz.io/signoz/ee/query-service/interfaces" + "go.signoz.io/signoz/ee/query-service/license" + "go.signoz.io/signoz/ee/query-service/usage" + baseapp "go.signoz.io/signoz/pkg/query-service/app" + "go.signoz.io/signoz/pkg/query-service/app/integrations" + "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" + "go.signoz.io/signoz/pkg/query-service/cache" + baseint "go.signoz.io/signoz/pkg/query-service/interfaces" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + rules "go.signoz.io/signoz/pkg/query-service/rules" + "go.signoz.io/signoz/pkg/query-service/version" +) + +type APIHandlerOptions struct { + DataConnector interfaces.DataConnector + SkipConfig *basemodel.SkipConfig + PreferSpanMetrics bool + MaxIdleConns int + MaxOpenConns int + DialTimeout time.Duration + AppDao dao.ModelDao + RulesManager *rules.Manager + UsageManager *usage.Manager + FeatureFlags baseint.FeatureLookup + LicenseManager *license.Manager + IntegrationsController *integrations.Controller + LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController + Cache cache.Cache + Gateway *httputil.ReverseProxy + // Querier Influx Interval + FluxInterval time.Duration +} + +type APIHandler struct { + opts APIHandlerOptions + baseapp.APIHandler +} + +// NewAPIHandler returns an APIHandler +func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) { + + 
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{ + Reader: opts.DataConnector, + SkipConfig: opts.SkipConfig, + PreferSpanMetrics: opts.PreferSpanMetrics, + MaxIdleConns: opts.MaxIdleConns, + MaxOpenConns: opts.MaxOpenConns, + DialTimeout: opts.DialTimeout, + AppDao: opts.AppDao, + RuleManager: opts.RulesManager, + FeatureFlags: opts.FeatureFlags, + IntegrationsController: opts.IntegrationsController, + LogsParsingPipelineController: opts.LogsParsingPipelineController, + Cache: opts.Cache, + FluxInterval: opts.FluxInterval, + }) + + if err != nil { + return nil, err + } + + ah := &APIHandler{ + opts: opts, + APIHandler: *baseHandler, + } + return ah, nil +} + +func (ah *APIHandler) FF() baseint.FeatureLookup { + return ah.opts.FeatureFlags +} + +func (ah *APIHandler) RM() *rules.Manager { + return ah.opts.RulesManager +} + +func (ah *APIHandler) LM() *license.Manager { + return ah.opts.LicenseManager +} + +func (ah *APIHandler) UM() *usage.Manager { + return ah.opts.UsageManager +} + +func (ah *APIHandler) AppDao() dao.ModelDao { + return ah.opts.AppDao +} + +func (ah *APIHandler) Gateway() *httputil.ReverseProxy { + return ah.opts.Gateway +} + +func (ah *APIHandler) CheckFeature(f string) bool { + err := ah.FF().CheckFeature(f) + return err == nil +} + +// RegisterRoutes registers routes for this handler on the given router +func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddleware) { + // note: add ee override methods first + + // routes available only in ee version + router.HandleFunc("/api/v1/licenses", + am.AdminAccess(ah.listLicenses)). + Methods(http.MethodGet) + + router.HandleFunc("/api/v1/licenses", + am.AdminAccess(ah.applyLicense)). + Methods(http.MethodPost) + + router.HandleFunc("/api/v1/featureFlags", + am.OpenAccess(ah.getFeatureFlags)). + Methods(http.MethodGet) + + router.HandleFunc("/api/v1/loginPrecheck", + am.OpenAccess(ah.precheckLogin)). 
+ Methods(http.MethodGet) + + // paid plans specific routes + router.HandleFunc("/api/v1/complete/saml", + am.OpenAccess(ah.receiveSAML)). + Methods(http.MethodPost) + + router.HandleFunc("/api/v1/complete/google", + am.OpenAccess(ah.receiveGoogleAuth)). + Methods(http.MethodGet) + + router.HandleFunc("/api/v1/orgs/{orgId}/domains", + am.AdminAccess(ah.listDomainsByOrg)). + Methods(http.MethodGet) + + router.HandleFunc("/api/v1/domains", + am.AdminAccess(ah.postDomain)). + Methods(http.MethodPost) + + router.HandleFunc("/api/v1/domains/{id}", + am.AdminAccess(ah.putDomain)). + Methods(http.MethodPut) + + router.HandleFunc("/api/v1/domains/{id}", + am.AdminAccess(ah.deleteDomain)). + Methods(http.MethodDelete) + + // base overrides + router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet) + + // PAT APIs + router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.getPATs)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.updatePAT)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.revokePAT)).Methods(http.MethodDelete) + + router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost) + + router.HandleFunc("/api/v1/dashboards/{uuid}/lock", 
am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut) + + router.HandleFunc("/api/v2/licenses", + am.ViewAccess(ah.listLicensesV2)). + Methods(http.MethodGet) + + // Gateway + router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP)) + + ah.APIHandler.RegisterRoutes(router, am) + +} + +func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) { + version := version.GetVersion() + versionResponse := basemodel.GetVersionResponse{ + Version: version, + EE: "Y", + SetupCompleted: ah.SetupCompleted, + } + + ah.WriteJSON(w, r, versionResponse) +} diff --git a/signoz/ee/query-service/app/api/auth.go b/signoz/ee/query-service/app/api/auth.go new file mode 100644 index 0000000..9a28fce --- /dev/null +++ b/signoz/ee/query-service/app/api/auth.go @@ -0,0 +1,342 @@ +package api + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + + "github.com/gorilla/mux" + "go.uber.org/zap" + + "go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/model" + baseauth "go.signoz.io/signoz/pkg/query-service/auth" + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +func parseRequest(r *http.Request, req interface{}) error { + defer r.Body.Close() + requestBody, err := io.ReadAll(r.Body) + if err != nil { + return err + } + + err = json.Unmarshal(requestBody, &req) + return err +} + +// loginUser overrides base handler and considers SSO case. 
+func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) { + + req := basemodel.LoginRequest{} + err := parseRequest(r, &req) + if err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + ctx := context.Background() + + if req.Email != "" && ah.CheckFeature(model.SSO) { + var apierr basemodel.BaseApiError + _, apierr = ah.AppDao().CanUsePassword(ctx, req.Email) + if apierr != nil && !apierr.IsNil() { + RespondError(w, apierr, nil) + } + } + + // if all looks good, call auth + resp, err := baseauth.Login(ctx, &req) + if ah.HandleError(w, err, http.StatusUnauthorized) { + return + } + + ah.WriteJSON(w, r, resp) +} + +// registerUser registers a user and responds with a precheck +// so the front-end can decide the login method +func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { + + if !ah.CheckFeature(model.SSO) { + ah.APIHandler.Register(w, r) + return + } + + ctx := context.Background() + var req *baseauth.RegisterRequest + + defer r.Body.Close() + requestBody, err := io.ReadAll(r.Body) + if err != nil { + zap.L().Error("received no input in api", zap.Error(err)) + RespondError(w, model.BadRequest(err), nil) + return + } + + err = json.Unmarshal(requestBody, &req) + + if err != nil { + zap.L().Error("received invalid user registration request", zap.Error(err)) + RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil) + return + } + + // get invite object + invite, err := baseauth.ValidateInvite(ctx, req) + if err != nil { + zap.L().Error("failed to validate invite token", zap.Error(err)) + RespondError(w, model.BadRequest(err), nil) + return + } + + if invite == nil { + zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err)) + RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil) + return + } + + // get auth domain from email domain + domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email) + if apierr != nil { + 
zap.L().Error("failed to get domain from email", zap.Error(apierr)) + RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil) + } + + precheckResp := &basemodel.PrecheckResponse{ + SSO: false, + IsUser: false, + } + + if domain != nil && domain.SsoEnabled { + // sso is enabled, create user and respond precheck data + user, apierr := baseauth.RegisterInvitedUser(ctx, req, true) + if apierr != nil { + RespondError(w, apierr, nil) + return + } + + var precheckError basemodel.BaseApiError + + precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl) + if precheckError != nil { + RespondError(w, precheckError, precheckResp) + } + + } else { + // no-sso, validate password + if err := baseauth.ValidatePassword(req.Password); err != nil { + RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil) + return + } + + _, registerError := baseauth.Register(ctx, req) + if !registerError.IsNil() { + RespondError(w, apierr, nil) + return + } + + precheckResp.IsUser = true + } + + ah.Respond(w, precheckResp) +} + +// getInvite returns the invite object details for the given invite token. We do not need to +// protect this API because invite token itself is meant to be private. 
+func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) { + token := mux.Vars(r)["token"] + sourceUrl := r.URL.Query().Get("ref") + ctx := context.Background() + + inviteObject, err := baseauth.GetInvite(context.Background(), token) + if err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + resp := model.GettableInvitation{ + InvitationResponseObject: inviteObject, + } + + precheck, apierr := ah.AppDao().PrecheckLogin(ctx, inviteObject.Email, sourceUrl) + resp.Precheck = precheck + + if apierr != nil { + RespondError(w, apierr, resp) + } + + ah.WriteJSON(w, r, resp) +} + +// PrecheckLogin enables browser login page to display appropriate +// login methods +func (ah *APIHandler) precheckLogin(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + + email := r.URL.Query().Get("email") + sourceUrl := r.URL.Query().Get("ref") + + resp, apierr := ah.AppDao().PrecheckLogin(ctx, email, sourceUrl) + if apierr != nil { + RespondError(w, apierr, resp) + } + + ah.Respond(w, resp) +} + +func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { + ssoError := []byte("Login failed. 
Please contact your system administrator") + dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) + base64.StdEncoding.Encode(dst, ssoError) + + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) +} + +// receiveGoogleAuth completes google OAuth response and forwards a request +// to front-end to sign user in +func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) { + redirectUri := constants.GetDefaultSiteURL() + ctx := context.Background() + + if !ah.CheckFeature(model.SSO) { + zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain") + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) + return + } + + q := r.URL.Query() + if errType := q.Get("error"); errType != "" { + zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description"))) + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently) + return + } + + relayState := q.Get("state") + zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) + + parsedState, err := url.Parse(relayState) + if err != nil || relayState == "" { + zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) + handleSsoError(w, r, redirectUri) + return + } + + // upgrade redirect url from the relay state for better accuracy + redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") + + // fetch domain by parsing relay state. 
+ domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState) + if err != nil { + handleSsoError(w, r, redirectUri) + return + } + + // now that we have domain, use domain to fetch sso settings. + // prepare google callback handler using parsedState - + // which contains redirect URL (front-end endpoint) + callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + identity, err := callbackHandler.HandleCallback(r) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, identity.Email) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + http.Redirect(w, r, nextPage, http.StatusSeeOther) +} + +// receiveSAML completes a SAML request and gets user logged in +func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { + // this is the source url that initiated the login request + redirectUri := constants.GetDefaultSiteURL() + ctx := context.Background() + + if !ah.CheckFeature(model.SSO) { + zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain") + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) + return + } + + err := r.ParseForm() + if err != nil { + zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) + handleSsoError(w, r, redirectUri) 
+ return + } + + // the relay state is sent when a login request is submitted to + // Idp. + relayState := r.FormValue("RelayState") + zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState)) + + parsedState, err := url.Parse(relayState) + if err != nil || relayState == "" { + zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) + handleSsoError(w, r, redirectUri) + return + } + + // upgrade redirect url from the relay state for better accuracy + redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") + + // fetch domain by parsing relay state. + domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState) + if err != nil { + handleSsoError(w, r, redirectUri) + return + } + + sp, err := domain.PrepareSamlRequest(parsedState) + if err != nil { + zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse")) + if err != nil { + zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + if assertionInfo.WarningInfo.InvalidTime { + zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + email := assertionInfo.NameID + if email == "" { + zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String())) + handleSsoError(w, r, redirectUri) + return + } + + nextPage, err := ah.AppDao().PrepareSsoRedirect(ctx, redirectUri, email) + if err != nil { + zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), 
zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + http.Redirect(w, r, nextPage, http.StatusSeeOther) +} diff --git a/signoz/ee/query-service/app/api/dashboard.go b/signoz/ee/query-service/app/api/dashboard.go new file mode 100644 index 0000000..51fe6c2 --- /dev/null +++ b/signoz/ee/query-service/app/api/dashboard.go @@ -0,0 +1,58 @@ +package api + +import ( + "errors" + "net/http" + "strings" + + "github.com/gorilla/mux" + "go.signoz.io/signoz/pkg/query-service/app/dashboards" + "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/common" + "go.signoz.io/signoz/pkg/query-service/model" +) + +func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) { + ah.lockUnlockDashboard(w, r, true) +} + +func (ah *APIHandler) unlockDashboard(w http.ResponseWriter, r *http.Request) { + ah.lockUnlockDashboard(w, r, false) +} + +func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request, lock bool) { + // Locking can only be done by the owner of the dashboard + // or an admin + + // - Fetch the dashboard + // - Check if the user is the owner or an admin + // - If yes, lock/unlock the dashboard + // - If no, return 403 + + // Get the dashboard UUID from the request + uuid := mux.Vars(r)["uuid"] + if strings.HasPrefix(uuid,"integration") { + RespondError(w, &model.ApiError{Typ: model.ErrorForbidden, Err: errors.New("dashboards created by integrations cannot be unlocked")}, "You are not authorized to lock/unlock this dashboard") + return + } + dashboard, err := dashboards.GetDashboard(r.Context(), uuid) + if err != nil { + RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, err.Error()) + return + } + + user := common.GetUserFromContext(r.Context()) + if !auth.IsAdmin(user) && (dashboard.CreateBy != nil && *dashboard.CreateBy != user.Email) { + RespondError(w, &model.ApiError{Typ: model.ErrorForbidden, Err: err}, "You are not authorized to lock/unlock this dashboard") + 
return + } + + // Lock/Unlock the dashboard + err = dashboards.LockUnlockDashboard(r.Context(), uuid, lock) + if err != nil { + RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, err.Error()) + return + } + + ah.Respond(w, "Dashboard updated successfully") +} diff --git a/signoz/ee/query-service/app/api/domains.go b/signoz/ee/query-service/app/api/domains.go new file mode 100644 index 0000000..6456928 --- /dev/null +++ b/signoz/ee/query-service/app/api/domains.go @@ -0,0 +1,90 @@ +package api + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/google/uuid" + "github.com/gorilla/mux" + "go.signoz.io/signoz/ee/query-service/model" +) + +func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) { + orgId := mux.Vars(r)["orgId"] + domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId) + if apierr != nil { + RespondError(w, apierr, domains) + return + } + ah.Respond(w, domains) +} + +func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + + req := model.OrgDomain{} + + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + if err := req.ValidNew(); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil { + RespondError(w, apierr, nil) + return + } + + ah.Respond(w, &req) +} + +func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + + domainIdStr := mux.Vars(r)["id"] + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + req := model.OrgDomain{Id: domainId} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + req.Id = domainId + if err := req.Valid(nil); err != nil { + RespondError(w, 
model.BadRequest(err), nil) + } + + if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil { + RespondError(w, apierr, nil) + return + } + + ah.Respond(w, &req) +} + +func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) { + domainIdStr := mux.Vars(r)["id"] + + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil) + return + } + + apierr := ah.AppDao().DeleteDomain(context.Background(), domainId) + if apierr != nil { + RespondError(w, apierr, nil) + return + } + ah.Respond(w, nil) +} diff --git a/signoz/ee/query-service/app/api/featureFlags.go b/signoz/ee/query-service/app/api/featureFlags.go new file mode 100644 index 0000000..22ee798 --- /dev/null +++ b/signoz/ee/query-service/app/api/featureFlags.go @@ -0,0 +1,24 @@ +package api + +import ( + "net/http" + + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { + featureSet, err := ah.FF().GetFeatureFlags() + if err != nil { + ah.HandleError(w, err, http.StatusInternalServerError) + return + } + if ah.opts.PreferSpanMetrics { + for idx := range featureSet { + feature := &featureSet[idx] + if feature.Name == basemodel.UseSpanMetrics { + featureSet[idx].Active = true + } + } + } + ah.Respond(w, featureSet) +} diff --git a/signoz/ee/query-service/app/api/gateway.go b/signoz/ee/query-service/app/api/gateway.go new file mode 100644 index 0000000..15d274e --- /dev/null +++ b/signoz/ee/query-service/app/api/gateway.go @@ -0,0 +1,34 @@ +package api + +import ( + "net/http" + "strings" + + "go.signoz.io/signoz/ee/query-service/integrations/gateway" +) + +func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + if !strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+gateway.AllowedPrefix) { + rw.WriteHeader(http.StatusNotFound) + return + } + + license, err := 
ah.LM().GetRepo().GetActiveLicense(ctx) + if err != nil { + RespondError(rw, err, nil) + return + } + + //Create headers + var licenseKey string + if license != nil { + licenseKey = license.Key + } + + req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey) + req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000") + req.Header.Set("X-Consumer-Groups", "ns:default") + + ah.Gateway().ServeHTTP(rw, req) +} diff --git a/signoz/ee/query-service/app/api/license.go b/signoz/ee/query-service/app/api/license.go new file mode 100644 index 0000000..51cfdde --- /dev/null +++ b/signoz/ee/query-service/app/api/license.go @@ -0,0 +1,272 @@ +package api + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/model" + "go.uber.org/zap" +) + +type DayWiseBreakdown struct { + Type string `json:"type"` + Breakdown []DayWiseData `json:"breakdown"` +} + +type DayWiseData struct { + Timestamp int64 `json:"timestamp"` + Count float64 `json:"count"` + Size float64 `json:"size"` + UnitPrice float64 `json:"unitPrice"` + Quantity float64 `json:"quantity"` + Total float64 `json:"total"` +} + +type tierBreakdown struct { + UnitPrice float64 `json:"unitPrice"` + Quantity float64 `json:"quantity"` + TierStart int64 `json:"tierStart"` + TierEnd int64 `json:"tierEnd"` + TierCost float64 `json:"tierCost"` +} + +type usageResponse struct { + Type string `json:"type"` + Unit string `json:"unit"` + Tiers []tierBreakdown `json:"tiers"` + DayWiseBreakdown DayWiseBreakdown `json:"dayWiseBreakdown"` +} + +type details struct { + Total float64 `json:"total"` + Breakdown []usageResponse `json:"breakdown"` + BaseFee float64 `json:"baseFee"` + BillTotal float64 `json:"billTotal"` +} + +type billingDetails struct { + Status string `json:"status"` + Data struct { + BillingPeriodStart int64 `json:"billingPeriodStart"` + BillingPeriodEnd int64 `json:"billingPeriodEnd"` + Details details 
`json:"details"` + Discount float64 `json:"discount"` + SubscriptionStatus string `json:"subscriptionStatus"` + } `json:"data"` +} + +func (ah *APIHandler) listLicenses(w http.ResponseWriter, r *http.Request) { + licenses, apiError := ah.LM().GetLicenses(context.Background()) + if apiError != nil { + RespondError(w, apiError, nil) + } + ah.Respond(w, licenses) +} + +func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) { + var l model.License + + if err := json.NewDecoder(r.Body).Decode(&l); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + if l.Key == "" { + RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil) + return + } + license, apiError := ah.LM().Activate(r.Context(), l.Key) + if apiError != nil { + RespondError(w, apiError, nil) + return + } + + ah.Respond(w, license) +} + +func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) { + + type checkoutResponse struct { + Status string `json:"status"` + Data struct { + RedirectURL string `json:"redirectURL"` + } `json:"data"` + } + + hClient := &http.Client{} + req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/checkout", r.Body) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) + licenseResp, err := hClient.Do(req) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + // decode response body + var resp checkoutResponse + if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + ah.Respond(w, resp.Data) +} + +func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) { + licenseKey := r.URL.Query().Get("licenseKey") + + if licenseKey == "" { + RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil) + return + } + + billingURL := fmt.Sprintf("%s/usage?licenseKey=%s", 
constants.LicenseSignozIo, licenseKey) + + hClient := &http.Client{} + req, err := http.NewRequest("GET", billingURL, nil) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) + billingResp, err := hClient.Do(req) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + // decode response body + var billingResponse billingDetails + if err := json.NewDecoder(billingResp.Body).Decode(&billingResponse); err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + // TODO(srikanthccv):Fetch the current day usage and add it to the response + ah.Respond(w, billingResponse.Data) +} + +func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { + + licenses, apiError := ah.LM().GetLicenses(context.Background()) + if apiError != nil { + RespondError(w, apiError, nil) + } + + resp := model.Licenses{ + TrialStart: -1, + TrialEnd: -1, + OnTrial: false, + WorkSpaceBlock: false, + TrialConvertedToSubscription: false, + GracePeriodEnd: -1, + Licenses: licenses, + } + + var currentActiveLicenseKey string + + for _, license := range licenses { + if license.IsCurrent { + currentActiveLicenseKey = license.Key + } + } + + // For the case when no license is applied i.e community edition + // There will be no trial details or license details + if currentActiveLicenseKey == "" { + ah.Respond(w, resp) + return + } + + // Fetch trial details + hClient := &http.Client{} + url := fmt.Sprintf("%s/trial?licenseKey=%s", constants.LicenseSignozIo, currentActiveLicenseKey) + req, err := http.NewRequest("GET", url, nil) + if err != nil { + zap.L().Error("Error while creating request for trial details", zap.Error(err)) + // If there is an error in fetching trial details, we will still return the license details + // to avoid blocking the UI + ah.Respond(w, resp) + return + } + req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) + 
trialResp, err := hClient.Do(req) + if err != nil { + zap.L().Error("Error while fetching trial details", zap.Error(err)) + // If there is an error in fetching trial details, we will still return the license details + // to avoid incorrectly blocking the UI + ah.Respond(w, resp) + return + } + defer trialResp.Body.Close() + + trialRespBody, err := io.ReadAll(trialResp.Body) + + if err != nil || trialResp.StatusCode != http.StatusOK { + zap.L().Error("Error while fetching trial details", zap.Error(err)) + // If there is an error in fetching trial details, we will still return the license details + // to avoid incorrectly blocking the UI + ah.Respond(w, resp) + return + } + + // decode response body + var trialRespData model.SubscriptionServerResp + + if err := json.Unmarshal(trialRespBody, &trialRespData); err != nil { + zap.L().Error("Error while decoding trial details", zap.Error(err)) + // If there is an error in fetching trial details, we will still return the license details + // to avoid incorrectly blocking the UI + ah.Respond(w, resp) + return + } + + resp.TrialStart = trialRespData.Data.TrialStart + resp.TrialEnd = trialRespData.Data.TrialEnd + resp.OnTrial = trialRespData.Data.OnTrial + resp.WorkSpaceBlock = trialRespData.Data.WorkSpaceBlock + resp.TrialConvertedToSubscription = trialRespData.Data.TrialConvertedToSubscription + resp.GracePeriodEnd = trialRespData.Data.GracePeriodEnd + + ah.Respond(w, resp) +} + +func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) { + + type checkoutResponse struct { + Status string `json:"status"` + Data struct { + RedirectURL string `json:"redirectURL"` + } `json:"data"` + } + + hClient := &http.Client{} + req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/portal", r.Body) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) + licenseResp, err := hClient.Do(req) + if err != nil { + 
RespondError(w, model.InternalError(err), nil) + return + } + + // decode response body + var resp checkoutResponse + if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + ah.Respond(w, resp.Data) +} diff --git a/signoz/ee/query-service/app/api/pat.go b/signoz/ee/query-service/app/api/pat.go new file mode 100644 index 0000000..3ff8be7 --- /dev/null +++ b/signoz/ee/query-service/app/api/pat.go @@ -0,0 +1,165 @@ +package api + +import ( + "context" + "crypto/rand" + "encoding/base64" + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/gorilla/mux" + "go.signoz.io/signoz/ee/query-service/model" + "go.signoz.io/signoz/pkg/query-service/auth" + baseconstants "go.signoz.io/signoz/pkg/query-service/constants" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +func generatePATToken() string { + // Generate a 32-byte random token. + token := make([]byte, 32) + rand.Read(token) + // Encode the token in base64. + encodedToken := base64.StdEncoding.EncodeToString(token) + return encodedToken +} + +func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + + req := model.CreatePATRequestBody{} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + user, err := auth.GetUserFromRequest(r) + if err != nil { + RespondError(w, &model.ApiError{ + Typ: model.ErrorUnauthorized, + Err: err, + }, nil) + return + } + pat := model.PAT{ + Name: req.Name, + Role: req.Role, + ExpiresAt: req.ExpiresInDays, + } + err = validatePATRequest(pat) + if err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + // All the PATs are associated with the user creating the PAT. 
+ pat.UserID = user.Id + pat.CreatedAt = time.Now().Unix() + pat.UpdatedAt = time.Now().Unix() + pat.LastUsed = 0 + pat.Token = generatePATToken() + + if pat.ExpiresAt != 0 { + // convert expiresAt to unix timestamp from days + pat.ExpiresAt = time.Now().Unix() + (pat.ExpiresAt * 24 * 60 * 60) + } + + zap.L().Info("Got Create PAT request", zap.Any("pat", pat)) + var apierr basemodel.BaseApiError + if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil { + RespondError(w, apierr, nil) + return + } + + ah.Respond(w, &pat) +} + +func validatePATRequest(req model.PAT) error { + if req.Role == "" || (req.Role != baseconstants.ViewerGroup && req.Role != baseconstants.EditorGroup && req.Role != baseconstants.AdminGroup) { + return fmt.Errorf("valid role is required") + } + if req.ExpiresAt < 0 { + return fmt.Errorf("valid expiresAt is required") + } + if req.Name == "" { + return fmt.Errorf("valid name is required") + } + return nil +} + +func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + + req := model.PAT{} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + user, err := auth.GetUserFromRequest(r) + if err != nil { + RespondError(w, &model.ApiError{ + Typ: model.ErrorUnauthorized, + Err: err, + }, nil) + return + } + + err = validatePATRequest(req) + if err != nil { + RespondError(w, model.BadRequest(err), nil) + return + } + + req.UpdatedByUserID = user.Id + id := mux.Vars(r)["id"] + req.UpdatedAt = time.Now().Unix() + zap.L().Info("Got Update PAT request", zap.Any("pat", req)) + var apierr basemodel.BaseApiError + if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil { + RespondError(w, apierr, nil) + return + } + + ah.Respond(w, map[string]string{"data": "pat updated successfully"}) +} + +func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + user, err := 
auth.GetUserFromRequest(r) + if err != nil { + RespondError(w, &model.ApiError{ + Typ: model.ErrorUnauthorized, + Err: err, + }, nil) + return + } + zap.L().Info("Get PATs for user", zap.String("user_id", user.Id)) + pats, apierr := ah.AppDao().ListPATs(ctx) + if apierr != nil { + RespondError(w, apierr, nil) + return + } + ah.Respond(w, pats) +} + +func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) { + ctx := context.Background() + id := mux.Vars(r)["id"] + user, err := auth.GetUserFromRequest(r) + if err != nil { + RespondError(w, &model.ApiError{ + Typ: model.ErrorUnauthorized, + Err: err, + }, nil) + return + } + + zap.L().Info("Revoke PAT with id", zap.String("id", id)) + if apierr := ah.AppDao().RevokePAT(ctx, id, user.Id); apierr != nil { + RespondError(w, apierr, nil) + return + } + ah.Respond(w, map[string]string{"data": "pat revoked successfully"}) +} diff --git a/signoz/ee/query-service/app/api/response.go b/signoz/ee/query-service/app/api/response.go new file mode 100644 index 0000000..fef5f89 --- /dev/null +++ b/signoz/ee/query-service/app/api/response.go @@ -0,0 +1,12 @@ +package api + +import ( + "net/http" + + baseapp "go.signoz.io/signoz/pkg/query-service/app" + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +func RespondError(w http.ResponseWriter, apiErr basemodel.BaseApiError, data interface{}) { + baseapp.RespondError(w, apiErr, data) +} diff --git a/signoz/ee/query-service/app/api/traces.go b/signoz/ee/query-service/app/api/traces.go new file mode 100644 index 0000000..3864fc6 --- /dev/null +++ b/signoz/ee/query-service/app/api/traces.go @@ -0,0 +1,33 @@ +package api + +import ( + "net/http" + + "go.signoz.io/signoz/ee/query-service/app/db" + "go.signoz.io/signoz/ee/query-service/model" + baseapp "go.signoz.io/signoz/pkg/query-service/app" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) { + + if 
!ah.CheckFeature(basemodel.SmartTraceDetail) { + zap.L().Info("SmartTraceDetail feature is not enabled in this plan") + ah.APIHandler.SearchTraces(w, r) + return + } + searchTracesParams, err := baseapp.ParseSearchTracesParams(r) + if err != nil { + RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params") + return + } + + result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm) + if ah.HandleError(w, err, http.StatusBadRequest) { + return + } + + ah.WriteJSON(w, r, result) + +} diff --git a/signoz/ee/query-service/app/db/metrics.go b/signoz/ee/query-service/app/db/metrics.go new file mode 100644 index 0000000..0cc8a55 --- /dev/null +++ b/signoz/ee/query-service/app/db/metrics.go @@ -0,0 +1,401 @@ +package db + +import ( + "context" + "crypto/md5" + "encoding/json" + "fmt" + "reflect" + "regexp" + "sort" + "strings" + "time" + + "go.signoz.io/signoz/ee/query-service/model" + baseconst "go.signoz.io/signoz/pkg/query-service/constants" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.signoz.io/signoz/pkg/query-service/utils" + "go.uber.org/zap" +) + +// GetMetricResultEE runs the query and returns list of time series +func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) { + + defer utils.Elapsed("GetMetricResult", nil)() + zap.L().Info("Executing metric result query: ", zap.String("query", query)) + + var hash string + // If getSubTreeSpans function is used in the clickhouse query + if strings.Contains(query, "getSubTreeSpans(") { + var err error + query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash) + if err == fmt.Errorf("no spans found for the given query") { + return nil, "", nil + } + if err != nil { + return nil, "", err + } + } + + rows, err := r.conn.Query(ctx, query) + if err != nil { + zap.L().Error("Error in processing query", zap.Error(err)) + return nil, "", fmt.Errorf("error in 
processing query") + } + + var ( + columnTypes = rows.ColumnTypes() + columnNames = rows.Columns() + vars = make([]interface{}, len(columnTypes)) + ) + for i := range columnTypes { + vars[i] = reflect.New(columnTypes[i].ScanType()).Interface() + } + // when group by is applied, each combination of cartesian product + // of attributes is separate series. each item in metricPointsMap + // represent a unique series. + metricPointsMap := make(map[string][]basemodel.MetricPoint) + // attribute key-value pairs for each group selection + attributesMap := make(map[string]map[string]string) + + defer rows.Close() + for rows.Next() { + if err := rows.Scan(vars...); err != nil { + return nil, "", err + } + var groupBy []string + var metricPoint basemodel.MetricPoint + groupAttributes := make(map[string]string) + // Assuming that the end result row contains a timestamp, value and option labels + // Label key and value are both strings. + for idx, v := range vars { + colName := columnNames[idx] + switch v := v.(type) { + case *string: + // special case for returning all labels + if colName == "fullLabels" { + var metric map[string]string + err := json.Unmarshal([]byte(*v), &metric) + if err != nil { + return nil, "", err + } + for key, val := range metric { + groupBy = append(groupBy, val) + groupAttributes[key] = val + } + } else { + groupBy = append(groupBy, *v) + groupAttributes[colName] = *v + } + case *time.Time: + metricPoint.Timestamp = v.UnixMilli() + case *float64: + metricPoint.Value = *v + case **float64: + // ch seems to return this type when column is derived from + // SELECT count(*)/ SELECT count(*) + floatVal := *v + if floatVal != nil { + metricPoint.Value = *floatVal + } + case *float32: + float32Val := float32(*v) + metricPoint.Value = float64(float32Val) + case *uint8, *uint64, *uint16, *uint32: + if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok { + metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint()) + } else { + groupBy = 
append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())) + groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()) + } + case *int8, *int16, *int32, *int64: + if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok { + metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int()) + } else { + groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())) + groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()) + } + default: + zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName)) + } + } + sort.Strings(groupBy) + key := strings.Join(groupBy, "") + attributesMap[key] = groupAttributes + metricPointsMap[key] = append(metricPointsMap[key], metricPoint) + } + + var seriesList []*basemodel.Series + for key := range metricPointsMap { + points := metricPointsMap[key] + // first point in each series could be invalid since the + // aggregations are applied with point from prev series + if len(points) != 0 && len(points) > 1 { + points = points[1:] + } + attributes := attributesMap[key] + series := basemodel.Series{Labels: attributes, Points: points} + seriesList = append(seriesList, &series) + } + // err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash) + // if err != nil { + // zap.L().Error("Error in dropping temporary table: ", err) + // return nil, err + // } + if hash == "" { + return seriesList, hash, nil + } else { + return seriesList, "getSubTreeSpans" + hash, nil + } +} + +func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) { + + zap.L().Debug("Executing getSubTreeSpans function") + + // str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from 
signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;` + + // process the query to fetch subTree query + var subtreeInput string + query, subtreeInput, hash = processQuery(query, hash) + + err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash) + if err != nil { + zap.L().Error("Error in dropping temporary table", zap.Error(err)) + return query, hash, err + } + + // Create temporary table to store the getSubTreeSpans() results + zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash)) + err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)") + if err != nil { + zap.L().Error("Error in creating temporary table", zap.Error(err)) + return query, hash, err + } + + var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse + getSpansSubQuery := subtreeInput + // Execute the subTree query + zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery)) + err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery) + + // zap.L().Info(getSpansSubQuery) + + if err != nil { + zap.L().Error("Error in processing sql query", zap.Error(err)) + return query, hash, fmt.Errorf("error in processing sql query") + } + + var searchScanResponses []basemodel.SearchSpanDBResponseItem + + // 
TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later. + // Fetch all the spans from of same TraceID so that we can build subtree + modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable) + + if len(getSpansSubQueryDBResponses) == 0 { + return query, hash, fmt.Errorf("no spans found for the given query") + } + zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery)) + err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID) + + if err != nil { + zap.L().Error("Error in processing sql query", zap.Error(err)) + return query, hash, fmt.Errorf("error in processing sql query") + } + + // Process model to fetch the spans + zap.L().Debug("Processing model to fetch the spans") + searchSpanResponses := []basemodel.SearchSpanResponseItem{} + for _, item := range searchScanResponses { + var jsonItem basemodel.SearchSpanResponseItem + json.Unmarshal([]byte(item.Model), &jsonItem) + jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano()) + if jsonItem.Events == nil { + jsonItem.Events = []string{} + } + searchSpanResponses = append(searchSpanResponses, jsonItem) + } + // Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash + // Use map to store pointer to the spans to avoid duplicates and save memory + zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) + + treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses) + if err != nil { + zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err)) + return query, hash, err + } + zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) + statement, err := 
r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash)) + if err != nil { + zap.L().Error("Error in preparing batch statement", zap.Error(err)) + return query, hash, err + } + for _, span := range treeSearchResponse { + var parentID string + if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" { + parentID = span.References[0].SpanId + } + err = statement.Append( + time.Unix(0, int64(span.TimeUnixNano)), + span.TraceID, + span.SpanID, + parentID, + span.RootSpanID, + span.ServiceName, + span.Name, + span.RootName, + uint64(span.DurationNano), + int8(span.Kind), + span.TagMap, + span.Events, + ) + if err != nil { + zap.L().Error("Error in processing sql query", zap.Error(err)) + return query, hash, err + } + } + zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash)) + err = statement.Send() + if err != nil { + zap.L().Error("Error in sending statement", zap.Error(err)) + return query, hash, err + } + return query, hash, nil +} + +//lint:ignore SA4009 return hash is feeded to the query +func processQuery(query string, hash string) (string, string, string) { + re3 := regexp.MustCompile(`getSubTreeSpans`) + + submatchall3 := re3.FindAllStringIndex(query, -1) + getSubtreeSpansMatchIndex := submatchall3[0][1] + + query2countParenthesis := query[getSubtreeSpansMatchIndex:] + + sqlCompleteIndex := 0 + countParenthesisImbalance := 0 + for i, char := range query2countParenthesis { + + if string(char) == "(" { + countParenthesisImbalance += 1 + } + if string(char) == ")" { + countParenthesisImbalance -= 1 + } + if countParenthesisImbalance == 0 { + sqlCompleteIndex = i + break + } + } + subtreeInput := query2countParenthesis[1:sqlCompleteIndex] + + // hash the subtreeInput + hmd5 := md5.Sum([]byte(subtreeInput)) + hash = fmt.Sprintf("%x", hmd5) + + // Reformat the query to use the getSubTreeSpans function + query = query[:getSubtreeSpansMatchIndex] + hash + " " + 
query2countParenthesis[sqlCompleteIndex+1:] + return query, subtreeInput, hash +} + +// getSubTreeAlgorithm is an algorithm to build the subtrees of the spans and return the list of spans +func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) { + + var spans []*model.SpanForTraceDetails + for _, spanItem := range payload { + var parentID string + if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" { + parentID = spanItem.References[0].SpanId + } + span := &model.SpanForTraceDetails{ + TimeUnixNano: spanItem.TimeUnixNano, + SpanID: spanItem.SpanID, + TraceID: spanItem.TraceID, + ServiceName: spanItem.ServiceName, + Name: spanItem.Name, + Kind: spanItem.Kind, + DurationNano: spanItem.DurationNano, + TagMap: spanItem.TagMap, + ParentID: parentID, + Events: spanItem.Events, + HasError: spanItem.HasError, + } + spans = append(spans, span) + } + + zap.L().Debug("Building Tree") + roots, err := buildSpanTrees(&spans) + if err != nil { + return nil, err + } + searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem) + // Every span which was fetched from getSubTree Input SQL query is considered root + // For each root, get the subtree spans + for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses { + targetSpan := &model.SpanForTraceDetails{} + // zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses))) + // Search target span object in the tree + for _, root := range roots { + targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID) + if targetSpan != nil { + break + } + if err != nil { + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) + return nil, err + } + } + if targetSpan == nil { + return nil, nil + } + // Build subtree for the 
target span + // Mark the target span as root by setting parent ID as empty string + targetSpan.ParentID = "" + preParents := []*model.SpanForTraceDetails{targetSpan} + children := []*model.SpanForTraceDetails{} + + // Get the subtree child spans + for i := 0; len(preParents) != 0; i++ { + parents := []*model.SpanForTraceDetails{} + for _, parent := range preParents { + children = append(children, parent.Children...) + parents = append(parents, parent.Children...) + } + preParents = parents + } + + resultSpans := children + // Add the target span to the result spans + resultSpans = append(resultSpans, targetSpan) + + for _, item := range resultSpans { + references := []basemodel.OtelSpanRef{ + { + TraceId: item.TraceID, + SpanId: item.ParentID, + RefType: "CHILD_OF", + }, + } + + if item.Events == nil { + item.Events = []string{} + } + searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{ + TimeUnixNano: item.TimeUnixNano, + SpanID: item.SpanID, + TraceID: item.TraceID, + ServiceName: item.ServiceName, + Name: item.Name, + Kind: item.Kind, + References: references, + DurationNano: item.DurationNano, + TagMap: item.TagMap, + Events: item.Events, + HasError: item.HasError, + RootSpanID: getSpansSubQueryDBResponse.SpanID, + RootName: targetSpan.Name, + } + } + } + return searchSpansResult, nil +} diff --git a/signoz/ee/query-service/app/db/reader.go b/signoz/ee/query-service/app/db/reader.go new file mode 100644 index 0000000..b832605 --- /dev/null +++ b/signoz/ee/query-service/app/db/reader.go @@ -0,0 +1,39 @@ +package db + +import ( + "time" + + "github.com/ClickHouse/clickhouse-go/v2" + + "github.com/jmoiron/sqlx" + + basechr "go.signoz.io/signoz/pkg/query-service/app/clickhouseReader" + "go.signoz.io/signoz/pkg/query-service/interfaces" +) + +type ClickhouseReader struct { + conn clickhouse.Conn + appdb *sqlx.DB + *basechr.ClickHouseReader +} + +func NewDataConnector( + localDB *sqlx.DB, + promConfigPath string, + lm interfaces.FeatureLookup, + 
maxIdleConns int, + maxOpenConns int, + dialTimeout time.Duration, + cluster string, +) *ClickhouseReader { + ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster) + return &ClickhouseReader{ + conn: ch.GetConn(), + appdb: localDB, + ClickHouseReader: ch, + } +} + +func (r *ClickhouseReader) Start(readerReady chan bool) { + r.ClickHouseReader.Start(readerReady) +} diff --git a/signoz/ee/query-service/app/db/trace.go b/signoz/ee/query-service/app/db/trace.go new file mode 100644 index 0000000..497f56e --- /dev/null +++ b/signoz/ee/query-service/app/db/trace.go @@ -0,0 +1,228 @@ +package db + +import ( + "errors" + "strconv" + + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// SmartTraceAlgorithm is an algorithm to find the target span and build a tree of spans around it with the given levelUp and levelDown parameters and the given spanLimit +func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) { + var spans []*model.SpanForTraceDetails + + // if targetSpanId is null or not present then randomly select a span as targetSpanId + if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 { + targetSpanId = payload[0].SpanID + } + + // Build a slice of spans from the payload + for _, spanItem := range payload { + var parentID string + if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" { + parentID = spanItem.References[0].SpanId + } + span := &model.SpanForTraceDetails{ + TimeUnixNano: spanItem.TimeUnixNano, + SpanID: spanItem.SpanID, + TraceID: spanItem.TraceID, + ServiceName: spanItem.ServiceName, + Name: spanItem.Name, + Kind: spanItem.Kind, + DurationNano: spanItem.DurationNano, + TagMap: spanItem.TagMap, + ParentID: parentID, + Events: spanItem.Events, + HasError: 
spanItem.HasError, + } + spans = append(spans, span) + } + + // Build span trees from the spans + roots, err := buildSpanTrees(&spans) + if err != nil { + return nil, err + } + targetSpan := &model.SpanForTraceDetails{} + + // Find the target span in the span trees + for _, root := range roots { + targetSpan, err = breadthFirstSearch(root, targetSpanId) + if targetSpan != nil { + break + } + if err != nil { + zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err)) + return nil, err + } + } + + // If the target span is not found, return span not found error + if targetSpan == nil { + return nil, errors.New("span not found") + } + + // Build the final result + parents := []*model.SpanForTraceDetails{} + + // Get the parent spans of the target span up to the given levelUp parameter and spanLimit + preParent := targetSpan + for i := 0; i < levelUp+1; i++ { + if i == levelUp { + preParent.ParentID = "" + } + if spanLimit-len(preParent.Children) <= 0 { + parents = append(parents, preParent) + parents = append(parents, preParent.Children[:spanLimit]...) + spanLimit -= (len(preParent.Children[:spanLimit]) + 1) + preParent.ParentID = "" + break + } + parents = append(parents, preParent) + parents = append(parents, preParent.Children...) + spanLimit -= (len(preParent.Children) + 1) + preParent = preParent.ParentSpan + if preParent == nil { + break + } + } + + // Get the child spans of the target span until the given levelDown and spanLimit + preParents := []*model.SpanForTraceDetails{targetSpan} + children := []*model.SpanForTraceDetails{} + + for i := 0; i < levelDown && len(preParents) != 0 && spanLimit > 0; i++ { + parents := []*model.SpanForTraceDetails{} + for _, parent := range preParents { + if spanLimit-len(parent.Children) <= 0 { + children = append(children, parent.Children[:spanLimit]...) + spanLimit -= len(parent.Children[:spanLimit]) + break + } + children = append(children, parent.Children...) + parents = append(parents, parent.Children...) 
+ } + preParents = parents + } + + // Store the final list of spans in the resultSpanSet map to avoid duplicates + resultSpansSet := make(map[*model.SpanForTraceDetails]struct{}) + resultSpansSet[targetSpan] = struct{}{} + for _, parent := range parents { + resultSpansSet[parent] = struct{}{} + } + for _, child := range children { + resultSpansSet[child] = struct{}{} + } + + searchSpansResult := []basemodel.SearchSpansResult{{ + Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"}, + Events: make([][]interface{}, len(resultSpansSet)), + IsSubTree: true, + }, + } + + // Convert the resultSpansSet map to searchSpansResult + i := 0 // index for spans + for item := range resultSpansSet { + references := []basemodel.OtelSpanRef{ + { + TraceId: item.TraceID, + SpanId: item.ParentID, + RefType: "CHILD_OF", + }, + } + + referencesStringArray := []string{} + for _, item := range references { + referencesStringArray = append(referencesStringArray, item.ToString()) + } + keys := make([]string, 0, len(item.TagMap)) + values := make([]string, 0, len(item.TagMap)) + + for k, v := range item.TagMap { + keys = append(keys, k) + values = append(values, v) + } + if item.Events == nil { + item.Events = []string{} + } + searchSpansResult[0].Events[i] = []interface{}{ + item.TimeUnixNano, + item.SpanID, + item.TraceID, + item.ServiceName, + item.Name, + strconv.Itoa(int(item.Kind)), + strconv.FormatInt(item.DurationNano, 10), + keys, + values, + referencesStringArray, + item.Events, + item.HasError, + } + i++ // increment index + } + return searchSpansResult, nil +} + +// buildSpanTrees builds trees of spans from a list of spans. 
+func buildSpanTrees(spansPtr *[]*model.SpanForTraceDetails) ([]*model.SpanForTraceDetails, error) { + + // Build a map of spanID to span for fast lookup + var roots []*model.SpanForTraceDetails + spans := *spansPtr + mapOfSpans := make(map[string]*model.SpanForTraceDetails, len(spans)) + + for _, span := range spans { + if span.ParentID == "" { + roots = append(roots, span) + } + mapOfSpans[span.SpanID] = span + } + + // Build the span tree by adding children to the parent spans + for _, span := range spans { + if span.ParentID == "" { + continue + } + parent := mapOfSpans[span.ParentID] + + // If the parent span is not found, add current span to list of roots + if parent == nil { + // zap.L().Debug("Parent Span not found parent_id: ", span.ParentID) + roots = append(roots, span) + span.ParentID = "" + continue + } + + span.ParentSpan = parent + parent.Children = append(parent.Children, span) + } + + return roots, nil +} + +// breadthFirstSearch performs a breadth-first search on the span tree to find the target span. 
+func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*model.SpanForTraceDetails, error) { + queue := []*model.SpanForTraceDetails{spansPtr} + visited := make(map[string]bool) + + for len(queue) > 0 { + current := queue[0] + visited[current.SpanID] = true + queue = queue[1:] + if current.SpanID == targetId { + return current, nil + } + + for _, child := range current.Children { + if ok := visited[child.SpanID]; !ok { + queue = append(queue, child) + } + } + } + return nil, nil +} diff --git a/signoz/ee/query-service/app/server.go b/signoz/ee/query-service/app/server.go new file mode 100644 index 0000000..c41788e --- /dev/null +++ b/signoz/ee/query-service/app/server.go @@ -0,0 +1,742 @@ +package app + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net" + "net/http" + _ "net/http/pprof" // http profiler + "os" + "regexp" + "time" + + "github.com/gorilla/handlers" + "github.com/gorilla/mux" + "github.com/jmoiron/sqlx" + + "github.com/rs/cors" + "github.com/soheilhy/cmux" + "go.signoz.io/signoz/ee/query-service/app/api" + "go.signoz.io/signoz/ee/query-service/app/db" + "go.signoz.io/signoz/ee/query-service/auth" + "go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/dao" + "go.signoz.io/signoz/ee/query-service/integrations/gateway" + "go.signoz.io/signoz/ee/query-service/interfaces" + baseauth "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/model" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" + + licensepkg "go.signoz.io/signoz/ee/query-service/license" + "go.signoz.io/signoz/ee/query-service/usage" + + "go.signoz.io/signoz/pkg/query-service/agentConf" + baseapp "go.signoz.io/signoz/pkg/query-service/app" + "go.signoz.io/signoz/pkg/query-service/app/dashboards" + baseexplorer "go.signoz.io/signoz/pkg/query-service/app/explorer" + "go.signoz.io/signoz/pkg/query-service/app/integrations" + 
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" + "go.signoz.io/signoz/pkg/query-service/app/opamp" + opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model" + "go.signoz.io/signoz/pkg/query-service/app/preferences" + "go.signoz.io/signoz/pkg/query-service/cache" + baseconst "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/healthcheck" + basealm "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" + baseint "go.signoz.io/signoz/pkg/query-service/interfaces" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine" + rules "go.signoz.io/signoz/pkg/query-service/rules" + "go.signoz.io/signoz/pkg/query-service/telemetry" + "go.signoz.io/signoz/pkg/query-service/utils" + "go.uber.org/zap" +) + +const AppDbEngine = "sqlite" + +type ServerOptions struct { + PromConfigPath string + SkipTopLvlOpsPath string + HTTPHostPort string + PrivateHostPort string + // alert specific params + DisableRules bool + RuleRepoURL string + PreferSpanMetrics bool + MaxIdleConns int + MaxOpenConns int + DialTimeout time.Duration + CacheConfigPath string + FluxInterval string + Cluster string + GatewayUrl string +} + +// Server runs HTTP api service +type Server struct { + serverOptions *ServerOptions + ruleManager *rules.Manager + + // public http router + httpConn net.Listener + httpServer *http.Server + + // private http + privateConn net.Listener + privateHTTP *http.Server + + // Usage manager + usageManager *usage.Manager + + opampServer *opamp.Server + + unavailableChannel chan healthcheck.Status +} + +// HealthCheckStatus returns health check status channel a client can subscribe to +func (s Server) HealthCheckStatus() chan healthcheck.Status { + return s.unavailableChannel +} + +// NewServer creates and initializes Server +func NewServer(serverOptions *ServerOptions) (*Server, error) { + + modelDao, err := dao.InitDao("sqlite", 
baseconst.RELATIONAL_DATASOURCE_PATH) + if err != nil { + return nil, err + } + + baseexplorer.InitWithDSN(baseconst.RELATIONAL_DATASOURCE_PATH) + + if err := preferences.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil { + return nil, err + } + + localDB, err := dashboards.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH) + + if err != nil { + return nil, err + } + + localDB.SetMaxOpenConns(10) + + gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix) + if err != nil { + return nil, err + } + + // initiate license manager + lm, err := licensepkg.StartManager("sqlite", localDB) + if err != nil { + return nil, err + } + + // set license manager as feature flag provider in dao + modelDao.SetFlagProvider(lm) + readerReady := make(chan bool) + + var reader interfaces.DataConnector + storage := os.Getenv("STORAGE") + if storage == "clickhouse" { + zap.L().Info("Using ClickHouse as datastore ...") + qb := db.NewDataConnector( + localDB, + serverOptions.PromConfigPath, + lm, + serverOptions.MaxIdleConns, + serverOptions.MaxOpenConns, + serverOptions.DialTimeout, + serverOptions.Cluster, + ) + go qb.Start(readerReady) + reader = qb + } else { + return nil, fmt.Errorf("storage type: %s is not supported in query service", storage) + } + skipConfig := &basemodel.SkipConfig{} + if serverOptions.SkipTopLvlOpsPath != "" { + // read skip config + skipConfig, err = basemodel.ReadSkipConfig(serverOptions.SkipTopLvlOpsPath) + if err != nil { + return nil, err + } + } + + <-readerReady + rm, err := makeRulesManager(serverOptions.PromConfigPath, + baseconst.GetAlertManagerApiPrefix(), + serverOptions.RuleRepoURL, + localDB, + reader, + serverOptions.DisableRules, + lm) + + if err != nil { + return nil, err + } + + // initiate opamp + _, err = opAmpModel.InitDB(localDB) + if err != nil { + return nil, err + } + + integrationsController, err := integrations.NewController(localDB) + if err != nil { + return nil, fmt.Errorf( + "couldn't create 
integrations controller: %w", err, + ) + } + + // ingestion pipelines manager + logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController( + localDB, "sqlite", integrationsController.GetPipelinesForInstalledIntegrations, + ) + if err != nil { + return nil, err + } + + // initiate agent config handler + agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{ + DB: localDB, + DBEngine: AppDbEngine, + AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController}, + }) + if err != nil { + return nil, err + } + + // start the usagemanager + usageManager, err := usage.New("sqlite", modelDao, lm.GetRepo(), reader.GetConn()) + if err != nil { + return nil, err + } + err = usageManager.Start() + if err != nil { + return nil, err + } + + telemetry.GetInstance().SetReader(reader) + telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey) + + var c cache.Cache + if serverOptions.CacheConfigPath != "" { + cacheOpts, err := cache.LoadFromYAMLCacheConfigFile(serverOptions.CacheConfigPath) + if err != nil { + return nil, err + } + c = cache.NewCache(cacheOpts) + } + + fluxInterval, err := time.ParseDuration(serverOptions.FluxInterval) + + if err != nil { + return nil, err + } + + apiOpts := api.APIHandlerOptions{ + DataConnector: reader, + SkipConfig: skipConfig, + PreferSpanMetrics: serverOptions.PreferSpanMetrics, + MaxIdleConns: serverOptions.MaxIdleConns, + MaxOpenConns: serverOptions.MaxOpenConns, + DialTimeout: serverOptions.DialTimeout, + AppDao: modelDao, + RulesManager: rm, + UsageManager: usageManager, + FeatureFlags: lm, + LicenseManager: lm, + IntegrationsController: integrationsController, + LogsParsingPipelineController: logParsingPipelineController, + Cache: c, + FluxInterval: fluxInterval, + Gateway: gatewayProxy, + } + + apiHandler, err := api.NewAPIHandler(apiOpts) + if err != nil { + return nil, err + } + + s := &Server{ + // logger: logger, + // tracer: tracer, + ruleManager: rm, + serverOptions: 
serverOptions, + unavailableChannel: make(chan healthcheck.Status), + usageManager: usageManager, + } + + httpServer, err := s.createPublicServer(apiHandler) + + if err != nil { + return nil, err + } + + s.httpServer = httpServer + + privateServer, err := s.createPrivateServer(apiHandler) + if err != nil { + return nil, err + } + + s.privateHTTP = privateServer + + s.opampServer = opamp.InitializeServer( + &opAmpModel.AllAgents, agentConfMgr, + ) + + return s, nil +} + +func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) { + + r := baseapp.NewRouter() + + r.Use(baseapp.LogCommentEnricher) + r.Use(setTimeoutMiddleware) + r.Use(s.analyticsMiddleware) + r.Use(loggingMiddlewarePrivate) + + apiHandler.RegisterPrivateRoutes(r) + + c := cors.New(cors.Options{ + //todo(amol): find out a way to add exact domain or + // ip here for alert manager + AllowedOrigins: []string{"*"}, + AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"}, + AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY"}, + }) + + handler := c.Handler(r) + handler = handlers.CompressHandler(handler) + + return &http.Server{ + Handler: handler, + }, nil +} + +func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, error) { + + r := baseapp.NewRouter() + + // add auth middleware + getUserFromRequest := func(r *http.Request) (*basemodel.UserPayload, error) { + user, err := auth.GetUserFromRequest(r, apiHandler) + + if err != nil { + return nil, err + } + + if user.User.OrgId == "" { + return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims")) + } + + return user, nil + } + am := baseapp.NewAuthMiddleware(getUserFromRequest) + + r.Use(baseapp.LogCommentEnricher) + r.Use(setTimeoutMiddleware) + r.Use(s.analyticsMiddleware) + r.Use(loggingMiddleware) + + apiHandler.RegisterRoutes(r, am) + apiHandler.RegisterLogsRoutes(r, am) + apiHandler.RegisterIntegrationRoutes(r, am) + 
apiHandler.RegisterQueryRangeV3Routes(r, am) + apiHandler.RegisterQueryRangeV4Routes(r, am) + + c := cors.New(cors.Options{ + AllowedOrigins: []string{"*"}, + AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"}, + AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"}, + }) + + handler := c.Handler(r) + + handler = handlers.CompressHandler(handler) + + return &http.Server{ + Handler: handler, + }, nil +} + +// loggingMiddleware is used for logging public api calls +func loggingMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + route := mux.CurrentRoute(r) + path, _ := route.GetPathTemplate() + startTime := time.Now() + next.ServeHTTP(w, r) + zap.L().Info(path, zap.Duration("timeTaken", time.Since(startTime)), zap.String("path", path)) + }) +} + +// loggingMiddlewarePrivate is used for logging private api calls +// from internal services like alert manager +func loggingMiddlewarePrivate(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + route := mux.CurrentRoute(r) + path, _ := route.GetPathTemplate() + startTime := time.Now() + next.ServeHTTP(w, r) + zap.L().Info(path, zap.Duration("timeTaken", time.Since(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true)) + }) +} + +type loggingResponseWriter struct { + http.ResponseWriter + statusCode int +} + +func NewLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter { + // WriteHeader(int) is not called if our response implicitly returns 200 OK, so + // we default to that status code. + return &loggingResponseWriter{w, http.StatusOK} +} + +func (lrw *loggingResponseWriter) WriteHeader(code int) { + lrw.statusCode = code + lrw.ResponseWriter.WriteHeader(code) +} + +// Flush implements the http.Flush interface. 
+func (lrw *loggingResponseWriter) Flush() { + lrw.ResponseWriter.(http.Flusher).Flush() +} + +func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) { + pathToExtractBodyFromV3 := "/api/v3/query_range" + pathToExtractBodyFromV4 := "/api/v4/query_range" + + data := map[string]interface{}{} + var postData *v3.QueryRangeParamsV3 + + if (r.Method == "POST") && ((path == pathToExtractBodyFromV3) || (path == pathToExtractBodyFromV4)) { + if r.Body != nil { + bodyBytes, err := io.ReadAll(r.Body) + if err != nil { + return nil, false + } + r.Body.Close() // must close + r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) + json.Unmarshal(bodyBytes, &postData) + + } else { + return nil, false + } + + } else { + return nil, false + } + + referrer := r.Header.Get("Referer") + + dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) + if err != nil { + zap.L().Error("error while matching the referrer", zap.Error(err)) + } + alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) + if err != nil { + zap.L().Error("error while matching the alert: ", zap.Error(err)) + } + logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer) + if err != nil { + zap.L().Error("error while matching the logs explorer: ", zap.Error(err)) + } + traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer) + if err != nil { + zap.L().Error("error while matching the trace explorer: ", zap.Error(err)) + } + + signozMetricsUsed := false + signozLogsUsed := false + signozTracesUsed := false + if postData != nil { + + if postData.CompositeQuery != nil { + data["queryType"] = postData.CompositeQuery.QueryType + data["panelType"] = postData.CompositeQuery.PanelType + + signozLogsUsed, signozMetricsUsed, signozTracesUsed = telemetry.GetInstance().CheckSigNozSignals(postData) + } + } + + if signozMetricsUsed || signozLogsUsed || signozTracesUsed 
{ + if signozMetricsUsed { + telemetry.GetInstance().AddActiveMetricsUser() + } + if signozLogsUsed { + telemetry.GetInstance().AddActiveLogsUser() + } + if signozTracesUsed { + telemetry.GetInstance().AddActiveTracesUser() + } + data["metricsUsed"] = signozMetricsUsed + data["logsUsed"] = signozLogsUsed + data["tracesUsed"] = signozTracesUsed + userEmail, err := baseauth.GetEmailFromJwt(r.Context()) + if err == nil { + // switch case to set data["screen"] based on the referrer + switch { + case dashboardMatched: + data["screen"] = "panel" + case alertMatched: + data["screen"] = "alert" + case logsExplorerMatched: + data["screen"] = "logs-explorer" + case traceExplorerMatched: + data["screen"] = "traces-explorer" + default: + data["screen"] = "unknown" + return data, true + } + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false) + } + } + return data, true +} + +func getActiveLogs(path string, r *http.Request) { + // if path == "/api/v1/dashboards/{uuid}" { + // telemetry.GetInstance().AddActiveMetricsUser() + // } + if path == "/api/v1/logs" { + hasFilters := len(r.URL.Query().Get("q")) + if hasFilters > 0 { + telemetry.GetInstance().AddActiveLogsUser() + } + + } + +} + +func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx := baseauth.AttachJwtToContext(r.Context(), r) + r = r.WithContext(ctx) + route := mux.CurrentRoute(r) + path, _ := route.GetPathTemplate() + + queryRangeData, metadataExists := extractQueryRangeData(path, r) + getActiveLogs(path, r) + + lrw := NewLoggingResponseWriter(w) + next.ServeHTTP(lrw, r) + + data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode} + if metadataExists { + for key, value := range queryRangeData { + data[key] = value + } + } + + if _, ok := telemetry.EnabledPaths()[path]; ok { + userEmail, err := baseauth.GetEmailFromJwt(r.Context()) + if err == nil { + 
				telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_PATH, data, userEmail, true, false)
			}
		}

	})
}

// setTimeoutMiddleware wraps handlers with a request-scoped deadline
// (baseconst.ContextTimeout) so a slow query cannot hold a connection open
// indefinitely. Routes listed in baseconst.TimeoutExcludedRoutes keep the
// original, deadline-free context.
func setTimeoutMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		var cancel context.CancelFunc
		// check if route is not excluded
		url := r.URL.Path
		if _, ok := baseconst.TimeoutExcludedRoutes[url]; !ok {
			ctx, cancel = context.WithTimeout(r.Context(), baseconst.ContextTimeout)
			// cancel releases the timer when the handler returns
			defer cancel()
		}

		r = r.WithContext(ctx)
		next.ServeHTTP(w, r)
	})
}

// initListeners initialises listeners of the server: the public HTTP
// listener first, then the private one used by internal services (e.g.
// alert manager). Returns an error if either host:port option is empty or
// the TCP bind fails.
func (s *Server) initListeners() error {
	// listen on public port
	var err error
	publicHostPort := s.serverOptions.HTTPHostPort
	if publicHostPort == "" {
		return fmt.Errorf("baseconst.HTTPHostPort is required")
	}

	s.httpConn, err = net.Listen("tcp", publicHostPort)
	if err != nil {
		return err
	}

	zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.serverOptions.HTTPHostPort))

	// listen on private port to support internal services
	privateHostPort := s.serverOptions.PrivateHostPort

	if privateHostPort == "" {
		return fmt.Errorf("baseconst.PrivateHostPort is required")
	}

	s.privateConn, err = net.Listen("tcp", privateHostPort)
	if err != nil {
		return err
	}
	zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.serverOptions.PrivateHostPort))

	return nil
}

// Start listening on http and private http port concurrently
func (s *Server) Start() error {

	// initiate rule manager first
	if !s.serverOptions.DisableRules {
		s.ruleManager.Start()
	} else {
		zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
	}

	err := s.initListeners()
	if err != nil {
		return err
	}

	var httpPort int
	if port, err := utils.GetPort(s.httpConn.Addr()); err == nil {
		httpPort = port
	}

	go func() {
		zap.L().Info("Starting HTTP server", 
zap.Int("port", httpPort), zap.String("addr", s.serverOptions.HTTPHostPort)) + + switch err := s.httpServer.Serve(s.httpConn); err { + case nil, http.ErrServerClosed, cmux.ErrListenerClosed: + // normal exit, nothing to do + default: + zap.L().Error("Could not start HTTP server", zap.Error(err)) + } + s.unavailableChannel <- healthcheck.Unavailable + }() + + go func() { + zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort)) + + err = http.ListenAndServe(baseconst.DebugHttpPort, nil) + if err != nil { + zap.L().Error("Could not start pprof server", zap.Error(err)) + } + }() + + var privatePort int + if port, err := utils.GetPort(s.privateConn.Addr()); err == nil { + privatePort = port + } + + go func() { + zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.serverOptions.PrivateHostPort)) + + switch err := s.privateHTTP.Serve(s.privateConn); err { + case nil, http.ErrServerClosed, cmux.ErrListenerClosed: + // normal exit, nothing to do + zap.L().Info("private http server closed") + default: + zap.L().Error("Could not start private HTTP server", zap.Error(err)) + } + + s.unavailableChannel <- healthcheck.Unavailable + + }() + + go func() { + zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) + err := s.opampServer.Start(baseconst.OpAmpWsEndpoint) + if err != nil { + zap.L().Error("opamp ws server failed to start", zap.Error(err)) + s.unavailableChannel <- healthcheck.Unavailable + } + }() + + return nil +} + +func (s *Server) Stop() error { + if s.httpServer != nil { + if err := s.httpServer.Shutdown(context.Background()); err != nil { + return err + } + } + + if s.privateHTTP != nil { + if err := s.privateHTTP.Shutdown(context.Background()); err != nil { + return err + } + } + + s.opampServer.Stop() + + if s.ruleManager != nil { + s.ruleManager.Stop() + } + + // stop usage manager + s.usageManager.Stop() + + return nil +} + +func makeRulesManager( + 
promConfigPath, + alertManagerURL string, + ruleRepoURL string, + db *sqlx.DB, + ch baseint.Reader, + disableRules bool, + fm baseint.FeatureLookup) (*rules.Manager, error) { + + // create engine + pqle, err := pqle.FromConfigPath(promConfigPath) + if err != nil { + return nil, fmt.Errorf("failed to create pql engine : %v", err) + } + + // notifier opts + notifierOpts := basealm.NotifierOptions{ + QueueCapacity: 10000, + Timeout: 1 * time.Second, + AlertManagerURLs: []string{alertManagerURL}, + } + + // create manager opts + managerOpts := &rules.ManagerOptions{ + NotifierOpts: notifierOpts, + Queriers: &rules.Queriers{ + PqlEngine: pqle, + Ch: ch.GetConn(), + }, + RepoURL: ruleRepoURL, + DBConn: db, + Context: context.Background(), + Logger: nil, + DisableRules: disableRules, + FeatureFlags: fm, + Reader: ch, + } + + // create Manager + manager, err := rules.NewManager(managerOpts) + if err != nil { + return nil, fmt.Errorf("rule manager error: %v", err) + } + + zap.L().Info("rules manager is ready") + + return manager, nil +} diff --git a/signoz/ee/query-service/auth/auth.go b/signoz/ee/query-service/auth/auth.go new file mode 100644 index 0000000..d45d050 --- /dev/null +++ b/signoz/ee/query-service/auth/auth.go @@ -0,0 +1,56 @@ +package auth + +import ( + "context" + "fmt" + "net/http" + "time" + + "go.signoz.io/signoz/ee/query-service/app/api" + baseauth "go.signoz.io/signoz/pkg/query-service/auth" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.signoz.io/signoz/pkg/query-service/telemetry" + + "go.uber.org/zap" +) + +func GetUserFromRequest(r *http.Request, apiHandler *api.APIHandler) (*basemodel.UserPayload, error) { + patToken := r.Header.Get("SIGNOZ-API-KEY") + if len(patToken) > 0 { + zap.L().Debug("Received a non-zero length PAT token") + ctx := context.Background() + dao := apiHandler.AppDao() + + pat, err := dao.GetPAT(ctx, patToken) + if err == nil && pat != nil { + zap.L().Debug("Found valid PAT: ", zap.Any("pat", pat)) + if 
pat.ExpiresAt < time.Now().Unix() && pat.ExpiresAt != 0 { + zap.L().Info("PAT has expired: ", zap.Any("pat", pat)) + return nil, fmt.Errorf("PAT has expired") + } + group, apiErr := dao.GetGroupByName(ctx, pat.Role) + if apiErr != nil { + zap.L().Error("Error while getting group for PAT: ", zap.Any("apiErr", apiErr)) + return nil, apiErr + } + user, err := dao.GetUser(ctx, pat.UserID) + if err != nil { + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) + return nil, err + } + telemetry.GetInstance().SetPatTokenUser() + dao.UpdatePATLastUsed(ctx, patToken, time.Now().Unix()) + user.User.GroupId = group.Id + user.User.Id = pat.Id + return &basemodel.UserPayload{ + User: user.User, + Role: pat.Role, + }, nil + } + if err != nil { + zap.L().Error("Error while getting user for PAT: ", zap.Error(err)) + return nil, err + } + } + return baseauth.GetUserFromRequest(r) +} diff --git a/signoz/ee/query-service/constants/constants.go b/signoz/ee/query-service/constants/constants.go new file mode 100644 index 0000000..cc4bb07 --- /dev/null +++ b/signoz/ee/query-service/constants/constants.go @@ -0,0 +1,32 @@ +package constants + +import ( + "os" +) + +const ( + DefaultSiteURL = "https://localhost:3301" +) + +var LicenseSignozIo = "https://license.signoz.io/api/v1" +var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") +var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") +var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500") +var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000") + +func GetOrDefaultEnv(key string, fallback string) string { + v := os.Getenv(key) + if len(v) == 0 { + return fallback + } + return v +} + +// constant functions that override env vars + +// GetDefaultSiteURL returns default site url, primarily +// used to send saml request and allowing backend to +// handle http redirect +func GetDefaultSiteURL() string { + return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL) +} 
diff --git a/signoz/ee/query-service/dao/factory.go b/signoz/ee/query-service/dao/factory.go
new file mode 100644
index 0000000..f623e17
--- /dev/null
+++ b/signoz/ee/query-service/dao/factory.go
@@ -0,0 +1,18 @@
package dao

import (
	"fmt"

	"go.signoz.io/signoz/ee/query-service/dao/sqlite"
)

// InitDao constructs the enterprise ModelDao for the given storage engine.
// Only "sqlite" is supported; path is the sqlite database file location.
// Any other engine name yields an error.
func InitDao(engine, path string) (ModelDao, error) {

	switch engine {
	case "sqlite":
		return sqlite.InitDB(path)
	default:
		return nil, fmt.Errorf("qsdb type: %s is not supported in query service", engine)
	}

}
diff --git a/signoz/ee/query-service/dao/interface.go b/signoz/ee/query-service/dao/interface.go
new file mode 100644
index 0000000..2fc8146
--- /dev/null
+++ b/signoz/ee/query-service/dao/interface.go
@@ -0,0 +1,44 @@
package dao

import (
	"context"
	"net/url"

	"github.com/google/uuid"
	"github.com/jmoiron/sqlx"
	"go.signoz.io/signoz/ee/query-service/model"
	basedao "go.signoz.io/signoz/pkg/query-service/dao"
	baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
	basemodel "go.signoz.io/signoz/pkg/query-service/model"
)

// ModelDao extends the base (community) DAO with enterprise-only
// operations: SSO/auth-domain management and personal access tokens (PATs).
type ModelDao interface {
	basedao.ModelDao

	// SetFlagProvider sets the feature lookup provider
	SetFlagProvider(flags baseint.FeatureLookup)

	// DB exposes the underlying sqlx handle for callers that need raw access.
	DB() *sqlx.DB

	// auth methods
	CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
	PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError)
	GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error)

	// org domain (auth domains) CRUD ops
	ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError)
	GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError)
	CreateDomain(ctx context.Context, d *model.OrgDomain) basemodel.BaseApiError
	UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError
	DeleteDomain(ctx context.Context, id 
uuid.UUID) basemodel.BaseApiError + GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError) + + CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) + UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError + GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError) + UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) basemodel.BaseApiError + GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError) + GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError) + ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError) + RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError +} diff --git a/signoz/ee/query-service/dao/sqlite/auth.go b/signoz/ee/query-service/dao/sqlite/auth.go new file mode 100644 index 0000000..b8bc5e0 --- /dev/null +++ b/signoz/ee/query-service/dao/sqlite/auth.go @@ -0,0 +1,200 @@ +package sqlite + +import ( + "context" + "fmt" + "net/url" + "strings" + "time" + + "github.com/google/uuid" + "go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/model" + baseauth "go.signoz.io/signoz/pkg/query-service/auth" + baseconst "go.signoz.io/signoz/pkg/query-service/constants" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.signoz.io/signoz/pkg/query-service/utils" + "go.uber.org/zap" +) + +func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, basemodel.BaseApiError) { + // get auth domain from email domain + domain, apierr := m.GetDomainByEmail(ctx, email) + if apierr != nil { + zap.L().Error("failed to get domain from email", zap.Error(apierr)) + return nil, model.InternalErrorStr("failed to get domain from email") + } + if domain == nil { + zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email)) + return 
nil, model.InternalErrorStr("email domain does not match any authenticated domain") + } + + hash, err := baseauth.PasswordHash(utils.GeneratePassowrd()) + if err != nil { + zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err)) + return nil, model.InternalErrorStr("failed to generate password hash") + } + + group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup) + if apiErr != nil { + zap.L().Error("GetGroupByName failed", zap.Error(apiErr)) + return nil, apiErr + } + + user := &basemodel.User{ + Id: uuid.NewString(), + Name: "", + Email: email, + Password: hash, + CreatedAt: time.Now().Unix(), + ProfilePictureURL: "", // Currently unused + GroupId: group.Id, + OrgId: domain.OrgId, + } + + user, apiErr = m.CreateUser(ctx, user, false) + if apiErr != nil { + zap.L().Error("CreateUser failed", zap.Error(apiErr)) + return nil, apiErr + } + + return user, nil + +} + +// PrepareSsoRedirect prepares redirect page link after SSO response +// is successfully parsed (i.e. 
valid email is available) +func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError) { + + userPayload, apierr := m.GetUserByEmail(ctx, email) + if !apierr.IsNil() { + zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error())) + return "", model.BadRequestStr("invalid user email received from the auth provider") + } + + user := &basemodel.User{} + + if userPayload == nil { + newUser, apiErr := m.createUserForSAMLRequest(ctx, email) + user = newUser + if apiErr != nil { + zap.L().Error("failed to create user with email received from auth provider", zap.Error(apiErr)) + return "", apiErr + } + } else { + user = &userPayload.User + } + + tokenStore, err := baseauth.GenerateJWTForUser(user) + if err != nil { + zap.L().Error("failed to generate token for SSO login user", zap.Error(err)) + return "", model.InternalErrorStr("failed to generate token for the user") + } + + return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s", + redirectUri, + tokenStore.AccessJwt, + user.Id, + tokenStore.RefreshJwt), nil +} + +func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) { + domain, apierr := m.GetDomainByEmail(ctx, email) + if apierr != nil { + return false, apierr + } + + if domain != nil && domain.SsoEnabled { + // sso is enabled, check if the user has admin role + userPayload, baseapierr := m.GetUserByEmail(ctx, email) + + if baseapierr != nil || userPayload == nil { + return false, baseapierr + } + + if userPayload.Role != baseconst.AdminGroup { + return false, model.BadRequest(fmt.Errorf("auth method not supported")) + } + + } + + return true, nil +} + +// PrecheckLogin is called when the login or signup page is loaded +// to check sso login is to be prompted +func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) { + + // 
assume user is valid unless proven otherwise + resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false} + + // check if email is a valid user + userPayload, baseApiErr := m.GetUserByEmail(ctx, email) + if baseApiErr != nil { + return resp, baseApiErr + } + + if userPayload == nil { + resp.IsUser = false + } + + ssoAvailable := true + err := m.checkFeature(model.SSO) + if err != nil { + switch err.(type) { + case basemodel.ErrFeatureUnavailable: + // do nothing, just skip sso + ssoAvailable = false + default: + zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) + return resp, model.BadRequestStr(err.Error()) + } + } + + if ssoAvailable { + + resp.IsUser = true + + // find domain from email + orgDomain, apierr := m.GetDomainByEmail(ctx, email) + if apierr != nil { + var emailDomain string + emailComponents := strings.Split(email, "@") + if len(emailComponents) > 0 { + emailDomain = emailComponents[1] + } + zap.L().Error("failed to get org domain from email", zap.String("emailDomain", emailDomain), zap.Error(apierr.ToError())) + return resp, apierr + } + + if orgDomain != nil && orgDomain.SsoEnabled { + // saml is enabled for this domain, lets prepare sso url + + if sourceUrl == "" { + sourceUrl = constants.GetDefaultSiteURL() + } + + // parse source url that generated the login request + var err error + escapedUrl, _ := url.QueryUnescape(sourceUrl) + siteUrl, err := url.Parse(escapedUrl) + if err != nil { + zap.L().Error("failed to parse referer", zap.Error(err)) + return resp, model.InternalError(fmt.Errorf("failed to generate login request")) + } + + // build Idp URL that will authenticat the user + // the front-end will redirect user to this url + resp.SsoUrl, err = orgDomain.BuildSsoUrl(siteUrl) + + if err != nil { + zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err)) + return resp, model.InternalError(err) + } + + // set SSO to true, as the url 
is generated correctly + resp.SSO = true + } + } + return resp, nil +} diff --git a/signoz/ee/query-service/dao/sqlite/domain.go b/signoz/ee/query-service/dao/sqlite/domain.go new file mode 100644 index 0000000..fbaa4fe --- /dev/null +++ b/signoz/ee/query-service/dao/sqlite/domain.go @@ -0,0 +1,253 @@ +package sqlite + +import ( + "context" + "database/sql" + "encoding/json" + "fmt" + "net/url" + "strings" + "time" + + "github.com/google/uuid" + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// StoredDomain represents stored database record for org domain + +type StoredDomain struct { + Id uuid.UUID `db:"id"` + Name string `db:"name"` + OrgId string `db:"org_id"` + Data string `db:"data"` + CreatedAt int64 `db:"created_at"` + UpdatedAt int64 `db:"updated_at"` +} + +// GetDomainFromSsoResponse uses relay state received from IdP to fetch +// user domain. The domain is further used to process validity of the response. +// when sending login request to IdP we send relay state as URL (site url) +// with domainId or domainName as query parameter. 
+func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error) { + // derive domain id from relay state now + var domainIdStr string + var domainNameStr string + var domain *model.OrgDomain + + for k, v := range relayState.Query() { + if k == "domainId" && len(v) > 0 { + domainIdStr = strings.Replace(v[0], ":", "-", -1) + } + if k == "domainName" && len(v) > 0 { + domainNameStr = v[0] + } + } + + if domainIdStr != "" { + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + zap.L().Error("failed to parse domainId from relay state", zap.Error(err)) + return nil, fmt.Errorf("failed to parse domainId from IdP response") + } + + domain, err = m.GetDomain(ctx, domainId) + if (err != nil) || domain == nil { + zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err)) + return nil, fmt.Errorf("invalid credentials") + } + } + + if domainNameStr != "" { + + domainFromDB, err := m.GetDomainByName(ctx, domainNameStr) + domain = domainFromDB + if (err != nil) || domain == nil { + zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err)) + return nil, fmt.Errorf("invalid credentials") + } + } + if domain != nil { + return domain, nil + } + + return nil, fmt.Errorf("failed to find domain received in IdP response") +} + +// GetDomainByName returns org domain for a given domain name +func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*model.OrgDomain, basemodel.BaseApiError) { + + stored := StoredDomain{} + err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE name=$1 LIMIT 1`, name) + + if err != nil { + if err == sql.ErrNoRows { + return nil, model.BadRequest(fmt.Errorf("invalid domain name")) + } + return nil, model.InternalError(err) + } + + domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId} + if err := domain.LoadConfig(stored.Data); err != nil { + return nil, model.InternalError(err) + } + 
return domain, nil +} + +// GetDomain returns org domain for a given domain id +func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError) { + + stored := StoredDomain{} + err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE id=$1 LIMIT 1`, id) + + if err != nil { + if err == sql.ErrNoRows { + return nil, model.BadRequest(fmt.Errorf("invalid domain id")) + } + return nil, model.InternalError(err) + } + + domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId} + if err := domain.LoadConfig(stored.Data); err != nil { + return nil, model.InternalError(err) + } + return domain, nil +} + +// ListDomains gets the list of auth domains by org id +func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError) { + domains := []model.OrgDomain{} + + stored := []StoredDomain{} + err := m.DB().SelectContext(ctx, &stored, `SELECT * FROM org_domains WHERE org_id=$1`, orgId) + + if err != nil { + if err == sql.ErrNoRows { + return []model.OrgDomain{}, nil + } + return nil, model.InternalError(err) + } + + for _, s := range stored { + domain := model.OrgDomain{Id: s.Id, Name: s.Name, OrgId: s.OrgId} + if err := domain.LoadConfig(s.Data); err != nil { + zap.L().Error("ListDomains() failed", zap.Error(err)) + } + domains = append(domains, domain) + } + + return domains, nil +} + +// CreateDomain creates a new auth domain +func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError { + + if domain.Id == uuid.Nil { + domain.Id = uuid.New() + } + + if domain.OrgId == "" || domain.Name == "" { + return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name ")) + } + + configJson, err := json.Marshal(domain) + if err != nil { + zap.L().Error("failed to unmarshal domain config", zap.Error(err)) + return model.InternalError(fmt.Errorf("domain creation failed")) + } + + _, err = 
m.DB().ExecContext(ctx, + "INSERT INTO org_domains (id, name, org_id, data, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)", + domain.Id, + domain.Name, + domain.OrgId, + configJson, + time.Now().Unix(), + time.Now().Unix()) + + if err != nil { + zap.L().Error("failed to insert domain in db", zap.Error(err)) + return model.InternalError(fmt.Errorf("domain creation failed")) + } + + return nil +} + +// UpdateDomain updates stored config params for a domain +func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError { + + if domain.Id == uuid.Nil { + zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + return model.InternalError(fmt.Errorf("domain update failed")) + } + + configJson, err := json.Marshal(domain) + if err != nil { + zap.L().Error("domain update failed", zap.Error(err)) + return model.InternalError(fmt.Errorf("domain update failed")) + } + + _, err = m.DB().ExecContext(ctx, + "UPDATE org_domains SET data = $1, updated_at = $2 WHERE id = $3", + configJson, + time.Now().Unix(), + domain.Id) + + if err != nil { + zap.L().Error("domain update failed", zap.Error(err)) + return model.InternalError(fmt.Errorf("domain update failed")) + } + + return nil +} + +// DeleteDomain deletes an org domain +func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError { + + if id == uuid.Nil { + zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) + return model.InternalError(fmt.Errorf("domain delete failed")) + } + + _, err := m.DB().ExecContext(ctx, + "DELETE FROM org_domains WHERE id = $1", + id) + + if err != nil { + zap.L().Error("domain delete failed", zap.Error(err)) + return model.InternalError(fmt.Errorf("domain delete failed")) + } + + return nil +} + +func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError) { + + if email == "" { + return nil, 
model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email ")) + } + + components := strings.Split(email, "@") + if len(components) < 2 { + return nil, model.BadRequest(fmt.Errorf("invalid email address")) + } + + parsedDomain := components[1] + + stored := StoredDomain{} + err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE name=$1 LIMIT 1`, parsedDomain) + + if err != nil { + if err == sql.ErrNoRows { + return nil, nil + } + return nil, model.InternalError(err) + } + + domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId} + if err := domain.LoadConfig(stored.Data); err != nil { + return nil, model.InternalError(err) + } + return domain, nil +} diff --git a/signoz/ee/query-service/dao/sqlite/modelDao.go b/signoz/ee/query-service/dao/sqlite/modelDao.go new file mode 100644 index 0000000..02b4367 --- /dev/null +++ b/signoz/ee/query-service/dao/sqlite/modelDao.go @@ -0,0 +1,144 @@ +package sqlite + +import ( + "fmt" + + "github.com/jmoiron/sqlx" + basedao "go.signoz.io/signoz/pkg/query-service/dao" + basedsql "go.signoz.io/signoz/pkg/query-service/dao/sqlite" + baseint "go.signoz.io/signoz/pkg/query-service/interfaces" + "go.uber.org/zap" +) + +type modelDao struct { + *basedsql.ModelDaoSqlite + flags baseint.FeatureLookup +} + +// SetFlagProvider sets the feature lookup provider +func (m *modelDao) SetFlagProvider(flags baseint.FeatureLookup) { + m.flags = flags +} + +// CheckFeature confirms if a feature is available +func (m *modelDao) checkFeature(key string) error { + if m.flags == nil { + return fmt.Errorf("flag provider not set") + } + + return m.flags.CheckFeature(key) +} + +func columnExists(db *sqlx.DB, tableName, columnName string) bool { + query := fmt.Sprintf("PRAGMA table_info(%s);", tableName) + rows, err := db.Query(query) + if err != nil { + zap.L().Error("Failed to query table info", zap.Error(err)) + return false + } + defer rows.Close() + + var ( + cid int + name string + ctype string + 
notnull int + dflt_value *string + pk int + ) + for rows.Next() { + err := rows.Scan(&cid, &name, &ctype, ¬null, &dflt_value, &pk) + if err != nil { + zap.L().Error("Failed to scan table info", zap.Error(err)) + return false + } + if name == columnName { + return true + } + } + err = rows.Err() + if err != nil { + zap.L().Error("Failed to scan table info", zap.Error(err)) + return false + } + return false +} + +// InitDB creates and extends base model DB repository +func InitDB(dataSourceName string) (*modelDao, error) { + dao, err := basedsql.InitDB(dataSourceName) + if err != nil { + return nil, err + } + // set package variable so dependent base methods (e.g. AuthCache) will work + basedao.SetDB(dao) + m := &modelDao{ModelDaoSqlite: dao} + + table_schema := ` + PRAGMA foreign_keys = ON; + CREATE TABLE IF NOT EXISTS org_domains( + id TEXT PRIMARY KEY, + org_id TEXT NOT NULL, + name VARCHAR(50) NOT NULL UNIQUE, + created_at INTEGER NOT NULL, + updated_at INTEGER, + data TEXT NOT NULL, + FOREIGN KEY(org_id) REFERENCES organizations(id) + ); + CREATE TABLE IF NOT EXISTS personal_access_tokens ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + role TEXT NOT NULL, + user_id TEXT NOT NULL, + token TEXT NOT NULL UNIQUE, + name TEXT NOT NULL, + created_at INTEGER NOT NULL, + expires_at INTEGER NOT NULL, + updated_at INTEGER NOT NULL, + last_used INTEGER NOT NULL, + revoked BOOLEAN NOT NULL, + updated_by_user_id TEXT NOT NULL, + FOREIGN KEY(user_id) REFERENCES users(id) + ); + ` + + _, err = m.DB().Exec(table_schema) + if err != nil { + return nil, fmt.Errorf("error in creating tables: %v", err.Error()) + } + + if !columnExists(m.DB(), "personal_access_tokens", "role") { + _, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN role TEXT NOT NULL DEFAULT 'ADMIN';") + if err != nil { + return nil, fmt.Errorf("error in adding column: %v", err.Error()) + } + } + if !columnExists(m.DB(), "personal_access_tokens", "updated_at") { + _, err = m.DB().Exec("ALTER TABLE 
personal_access_tokens ADD COLUMN updated_at INTEGER NOT NULL DEFAULT 0;") + if err != nil { + return nil, fmt.Errorf("error in adding column: %v", err.Error()) + } + } + if !columnExists(m.DB(), "personal_access_tokens", "last_used") { + _, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN last_used INTEGER NOT NULL DEFAULT 0;") + if err != nil { + return nil, fmt.Errorf("error in adding column: %v", err.Error()) + } + } + if !columnExists(m.DB(), "personal_access_tokens", "revoked") { + _, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN revoked BOOLEAN NOT NULL DEFAULT FALSE;") + if err != nil { + return nil, fmt.Errorf("error in adding column: %v", err.Error()) + } + } + if !columnExists(m.DB(), "personal_access_tokens", "updated_by_user_id") { + _, err = m.DB().Exec("ALTER TABLE personal_access_tokens ADD COLUMN updated_by_user_id TEXT NOT NULL DEFAULT '';") + if err != nil { + return nil, fmt.Errorf("error in adding column: %v", err.Error()) + } + } + return m, nil +} + +func (m *modelDao) DB() *sqlx.DB { + return m.ModelDaoSqlite.DB() +} diff --git a/signoz/ee/query-service/dao/sqlite/pat.go b/signoz/ee/query-service/dao/sqlite/pat.go new file mode 100644 index 0000000..75169db --- /dev/null +++ b/signoz/ee/query-service/dao/sqlite/pat.go @@ -0,0 +1,199 @@ +package sqlite + +import ( + "context" + "fmt" + "strconv" + "time" + + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) { + result, err := m.DB().ExecContext(ctx, + "INSERT INTO personal_access_tokens (user_id, token, role, name, created_at, expires_at, updated_at, updated_by_user_id, last_used, revoked) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + p.UserID, + p.Token, + p.Role, + p.Name, + p.CreatedAt, + p.ExpiresAt, + p.UpdatedAt, + p.UpdatedByUserID, + p.LastUsed, + p.Revoked, + ) + if 
err != nil { + zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err)) + return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) + } + id, err := result.LastInsertId() + if err != nil { + zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err)) + return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed")) + } + p.Id = strconv.Itoa(int(id)) + createdByUser, _ := m.GetUser(ctx, p.UserID) + if createdByUser == nil { + p.CreatedByUser = model.User{ + NotFound: true, + } + } else { + p.CreatedByUser = model.User{ + Id: createdByUser.Id, + Name: createdByUser.Name, + Email: createdByUser.Email, + CreatedAt: createdByUser.CreatedAt, + ProfilePictureURL: createdByUser.ProfilePictureURL, + NotFound: false, + } + } + return p, nil +} + +func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError { + _, err := m.DB().ExecContext(ctx, + "UPDATE personal_access_tokens SET role=$1, name=$2, updated_at=$3, updated_by_user_id=$4 WHERE id=$5 and revoked=false;", + p.Role, + p.Name, + p.UpdatedAt, + p.UpdatedByUserID, + id) + if err != nil { + zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err)) + return model.InternalError(fmt.Errorf("PAT update failed")) + } + return nil +} + +func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) basemodel.BaseApiError { + _, err := m.DB().ExecContext(ctx, + "UPDATE personal_access_tokens SET last_used=$1 WHERE token=$2 and revoked=false;", + lastUsed, + token) + if err != nil { + zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err)) + return model.InternalError(fmt.Errorf("PAT last used update failed")) + } + return nil +} + +func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError) { + pats := []model.PAT{} + + if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil { + 
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err)) + return nil, model.InternalError(fmt.Errorf("failed to fetch PATs")) + } + for i := range pats { + createdByUser, _ := m.GetUser(ctx, pats[i].UserID) + if createdByUser == nil { + pats[i].CreatedByUser = model.User{ + NotFound: true, + } + } else { + pats[i].CreatedByUser = model.User{ + Id: createdByUser.Id, + Name: createdByUser.Name, + Email: createdByUser.Email, + CreatedAt: createdByUser.CreatedAt, + ProfilePictureURL: createdByUser.ProfilePictureURL, + NotFound: false, + } + } + + updatedByUser, _ := m.GetUser(ctx, pats[i].UpdatedByUserID) + if updatedByUser == nil { + pats[i].UpdatedByUser = model.User{ + NotFound: true, + } + } else { + pats[i].UpdatedByUser = model.User{ + Id: updatedByUser.Id, + Name: updatedByUser.Name, + Email: updatedByUser.Email, + CreatedAt: updatedByUser.CreatedAt, + ProfilePictureURL: updatedByUser.ProfilePictureURL, + NotFound: false, + } + } + } + return pats, nil +} + +func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError { + updatedAt := time.Now().Unix() + _, err := m.DB().ExecContext(ctx, + "UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3", + userID, updatedAt, id) + if err != nil { + zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err)) + return model.InternalError(fmt.Errorf("PAT revoke failed")) + } + return nil +} + +func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) { + pats := []model.PAT{} + + if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE token=? 
and revoked=false;`, token); err != nil { + return nil, model.InternalError(fmt.Errorf("failed to fetch PAT")) + } + + if len(pats) != 1 { + return nil, &model.ApiError{ + Typ: model.ErrorInternal, + Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token), + } + } + + return &pats[0], nil +} + +func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError) { + pats := []model.PAT{} + + if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE id=? and revoked=false;`, id); err != nil { + return nil, model.InternalError(fmt.Errorf("failed to fetch PAT")) + } + + if len(pats) != 1 { + return nil, &model.ApiError{ + Typ: model.ErrorInternal, + Err: fmt.Errorf("found zero or multiple PATs with same token"), + } + } + + return &pats[0], nil +} + +// deprecated +func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError) { + users := []basemodel.UserPayload{} + + query := `SELECT + u.id, + u.name, + u.email, + u.password, + u.created_at, + u.profile_picture_url, + u.org_id, + u.group_id + FROM users u, personal_access_tokens p + WHERE u.id = p.user_id and p.token=? 
and p.expires_at >= strftime('%s', 'now');` + + if err := m.DB().Select(&users, query, token); err != nil { + return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err)) + } + + if len(users) != 1 { + return nil, &model.ApiError{ + Typ: model.ErrorInternal, + Err: fmt.Errorf("found zero or multiple users with same PAT token"), + } + } + return &users[0], nil +} diff --git a/signoz/ee/query-service/integrations/gateway/noop.go b/signoz/ee/query-service/integrations/gateway/noop.go new file mode 100644 index 0000000..ccb5d32 --- /dev/null +++ b/signoz/ee/query-service/integrations/gateway/noop.go @@ -0,0 +1,9 @@ +package gateway + +import ( + "net/http/httputil" +) + +func NewNoopProxy() (*httputil.ReverseProxy, error) { + return &httputil.ReverseProxy{}, nil +} diff --git a/signoz/ee/query-service/integrations/gateway/proxy.go b/signoz/ee/query-service/integrations/gateway/proxy.go new file mode 100644 index 0000000..8b225c4 --- /dev/null +++ b/signoz/ee/query-service/integrations/gateway/proxy.go @@ -0,0 +1,66 @@ +package gateway + +import ( + "net/http" + "net/http/httputil" + "net/url" + "path" + "strings" +) + +const ( + RoutePrefix string = "/api/gateway" + AllowedPrefix string = "/v1/workspaces/me" +) + +type proxy struct { + url *url.URL + stripPath string +} + +func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) { + url, err := url.Parse(u) + if err != nil { + return nil, err + } + + proxy := &proxy{url: url, stripPath: stripPath} + + return &httputil.ReverseProxy{ + Rewrite: proxy.rewrite, + ModifyResponse: proxy.modifyResponse, + ErrorHandler: proxy.errorHandler, + }, nil +} + +func (p *proxy) rewrite(pr *httputil.ProxyRequest) { + pr.SetURL(p.url) + pr.SetXForwarded() + pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, "")) +} + +func (p *proxy) modifyResponse(res *http.Response) error { + return nil +} + +func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, 
err error) { + rw.WriteHeader(http.StatusBadGateway) +} + +func cleanPath(p string) string { + if p == "" { + return "/" + } + if p[0] != '/' { + p = "/" + p + } + np := path.Clean(p) + if p[len(p)-1] == '/' && np != "/" { + if len(p) == len(np)+1 && strings.HasPrefix(p, np) { + np = p + } else { + np += "/" + } + } + return np +} diff --git a/signoz/ee/query-service/integrations/gateway/proxy_test.go b/signoz/ee/query-service/integrations/gateway/proxy_test.go new file mode 100644 index 0000000..45f5211 --- /dev/null +++ b/signoz/ee/query-service/integrations/gateway/proxy_test.go @@ -0,0 +1,61 @@ +package gateway + +import ( + "context" + "net/http" + "net/http/httputil" + "net/url" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestProxyRewrite(t *testing.T) { + testCases := []struct { + name string + url *url.URL + stripPath string + in *url.URL + expected *url.URL + }{ + { + name: "SamePathAdded", + url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"}, + stripPath: "/strip", + in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"}, + expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"}, + }, + { + name: "NoStripPathInput", + url: &url.URL{Scheme: "http", Host: "backend"}, + stripPath: "", + in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"}, + expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"}, + }, + { + name: "NoStripPathPresentInReq", + url: &url.URL{Scheme: "http", Host: "backend"}, + stripPath: "/not-found", + in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"}, + expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"}, + }, + } + + for _, tc := range testCases { + proxy, err := NewProxy(tc.url.String(), tc.stripPath) + require.NoError(t, err) + inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil) + require.NoError(t, err) + proxyReq := &httputil.ProxyRequest{ + 
In: inReq, + Out: inReq.Clone(context.Background()), + } + proxy.Rewrite(proxyReq) + + assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host) + assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme) + assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path) + assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query()) + } +} diff --git a/signoz/ee/query-service/integrations/signozio/response.go b/signoz/ee/query-service/integrations/signozio/response.go new file mode 100644 index 0000000..67ad8aa --- /dev/null +++ b/signoz/ee/query-service/integrations/signozio/response.go @@ -0,0 +1,15 @@ +package signozio + +type status string + +type ActivationResult struct { + Status status `json:"status"` + Data *ActivationResponse `json:"data,omitempty"` + ErrorType string `json:"errorType,omitempty"` + Error string `json:"error,omitempty"` +} + +type ActivationResponse struct { + ActivationId string `json:"ActivationId"` + PlanDetails string `json:"PlanDetails"` +} diff --git a/signoz/ee/query-service/integrations/signozio/signozio.go b/signoz/ee/query-service/integrations/signozio/signozio.go new file mode 100644 index 0000000..c18cfb6 --- /dev/null +++ b/signoz/ee/query-service/integrations/signozio/signozio.go @@ -0,0 +1,159 @@ +package signozio + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/pkg/errors" + "go.uber.org/zap" + + "go.signoz.io/signoz/ee/query-service/constants" + "go.signoz.io/signoz/ee/query-service/model" +) + +var C *Client + +const ( + POST = "POST" + APPLICATION_JSON = "application/json" +) + +type Client struct { + Prefix string +} + +func New() *Client { + return &Client{ + Prefix: constants.LicenseSignozIo, + } +} + +func init() { + C = New() +} + +// ActivateLicense sends key to license.signoz.io and gets activation data +func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) { + licenseReq := map[string]string{ + "key": key, + "siteId": siteId, + } + + reqString, _ 
:= json.Marshal(licenseReq) + httpResponse, err := http.Post(C.Prefix+"/licenses/activate", APPLICATION_JSON, bytes.NewBuffer(reqString)) + + if err != nil { + zap.L().Error("failed to connect to license.signoz.io", zap.Error(err)) + return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection")) + } + + httpBody, err := io.ReadAll(httpResponse.Body) + if err != nil { + zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err)) + return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io")) + } + + defer httpResponse.Body.Close() + + // read api request result + result := ActivationResult{} + err = json.Unmarshal(httpBody, &result) + if err != nil { + zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err)) + return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response")) + } + + switch httpResponse.StatusCode { + case 200, 201: + return result.Data, nil + case 400, 401: + return nil, model.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error))) + default: + return nil, model.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error))) + } + +} + +// ValidateLicense validates the license key +func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) { + validReq := map[string]string{ + "activationId": activationId, + } + + reqString, _ := json.Marshal(validReq) + response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString)) + + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection")) + } + + body, err := io.ReadAll(response.Body) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io")) + } + + defer 
response.Body.Close() + + switch response.StatusCode { + case 200, 201: + a := ActivationResult{} + err = json.Unmarshal(body, &a) + if err != nil { + return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response")) + } + return a.Data, nil + case 400, 401: + return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)), + "bad request error received from license.signoz.io")) + default: + return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)), + "internal error received from license.signoz.io")) + } + +} + +func NewPostRequestWithCtx(ctx context.Context, url string, contentType string, body io.Reader) (*http.Request, error) { + req, err := http.NewRequestWithContext(ctx, POST, url, body) + if err != nil { + return nil, err + } + req.Header.Add("Content-Type", contentType) + return req, err + +} + +// SendUsage reports the usage of signoz to license server +func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError { + reqString, _ := json.Marshal(usage) + req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString)) + if err != nil { + return model.BadRequest(errors.Wrap(err, "unable to create http request")) + } + + res, err := http.DefaultClient.Do(req) + if err != nil { + return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection")) + } + + body, err := io.ReadAll(res.Body) + if err != nil { + return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io")) + } + + defer res.Body.Close() + + switch res.StatusCode { + case 200, 201: + return nil + case 400, 401: + return model.BadRequest(errors.Wrap(fmt.Errorf(string(body)), + "bad request error received from license.signoz.io")) + default: + return model.InternalError(errors.Wrap(fmt.Errorf(string(body)), + "internal error received from license.signoz.io")) + } +} diff --git 
a/signoz/ee/query-service/interfaces/connector.go b/signoz/ee/query-service/interfaces/connector.go new file mode 100644 index 0000000..5428e42 --- /dev/null +++ b/signoz/ee/query-service/interfaces/connector.go @@ -0,0 +1,12 @@ +package interfaces + +import ( + baseint "go.signoz.io/signoz/pkg/query-service/interfaces" +) + +// Connector defines methods for interaction +// with o11y data. for example - clickhouse +type DataConnector interface { + Start(readerReady chan bool) + baseint.Reader +} diff --git a/signoz/ee/query-service/license/db.go b/signoz/ee/query-service/license/db.go new file mode 100644 index 0000000..f6ccc88 --- /dev/null +++ b/signoz/ee/query-service/license/db.go @@ -0,0 +1,206 @@ +package license + +import ( + "context" + "database/sql" + "fmt" + "time" + + "github.com/jmoiron/sqlx" + + "go.signoz.io/signoz/ee/query-service/license/sqlite" + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +// Repo is license repo. stores license keys in a secured DB +type Repo struct { + db *sqlx.DB +} + +// NewLicenseRepo initiates a new license repo +func NewLicenseRepo(db *sqlx.DB) Repo { + return Repo{ + db: db, + } +} + +func (r *Repo) InitDB(engine string) error { + switch engine { + case "sqlite3", "sqlite": + return sqlite.InitDB(r.db) + default: + return fmt.Errorf("unsupported db") + } +} + +func (r *Repo) GetLicenses(ctx context.Context) ([]model.License, error) { + licenses := []model.License{} + + query := "SELECT key, activationId, planDetails, validationMessage FROM licenses" + + err := r.db.Select(&licenses, query) + if err != nil { + return nil, fmt.Errorf("failed to get licenses from db: %v", err) + } + + return licenses, nil +} + +// GetActiveLicense fetches the latest active license from DB. +// If the license is not present, expect a nil license and a nil error in the output. 
+func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) { + var err error + licenses := []model.License{} + + query := "SELECT key, activationId, planDetails, validationMessage FROM licenses" + + err = r.db.Select(&licenses, query) + if err != nil { + return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err)) + } + + var active *model.License + for _, l := range licenses { + l.ParsePlan() + if active == nil && + (l.ValidFrom != 0) && + (l.ValidUntil == -1 || l.ValidUntil > time.Now().Unix()) { + active = &l + } + if active != nil && + l.ValidFrom > active.ValidFrom && + (l.ValidUntil == -1 || l.ValidUntil > time.Now().Unix()) { + active = &l + } + } + + return active, nil +} + +// InsertLicense inserts a new license in db +func (r *Repo) InsertLicense(ctx context.Context, l *model.License) error { + + if l.Key == "" { + return fmt.Errorf("insert license failed: license key is required") + } + + query := `INSERT INTO licenses + (key, planDetails, activationId, validationmessage) + VALUES ($1, $2, $3, $4)` + + _, err := r.db.ExecContext(ctx, + query, + l.Key, + l.PlanDetails, + l.ActivationId, + l.ValidationMessage) + + if err != nil { + zap.L().Error("error in inserting license data: ", zap.Error(err)) + return fmt.Errorf("failed to insert license in db: %v", err) + } + + return nil +} + +// UpdatePlanDetails writes new plan details to the db +func (r *Repo) UpdatePlanDetails(ctx context.Context, + key, + planDetails string) error { + + if key == "" { + return fmt.Errorf("update plan details failed: license key is required") + } + + query := `UPDATE licenses + SET planDetails = $1, + updatedAt = $2 + WHERE key = $3` + + _, err := r.db.ExecContext(ctx, query, planDetails, time.Now(), key) + + if err != nil { + zap.L().Error("error in updating license: ", zap.Error(err)) + return fmt.Errorf("failed to update license in db: %v", err) + } + + return nil +} + +func (r *Repo) CreateFeature(req 
*basemodel.Feature) *basemodel.ApiError { + + _, err := r.db.Exec( + `INSERT INTO feature_status (name, active, usage, usage_limit, route) + VALUES (?, ?, ?, ?, ?);`, + req.Name, req.Active, req.Usage, req.UsageLimit, req.Route) + if err != nil { + return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err} + } + return nil +} + +func (r *Repo) GetFeature(featureName string) (basemodel.Feature, error) { + + var feature basemodel.Feature + + err := r.db.Get(&feature, + `SELECT * FROM feature_status WHERE name = ?;`, featureName) + if err != nil { + return feature, err + } + if feature.Name == "" { + return feature, basemodel.ErrFeatureUnavailable{Key: featureName} + } + return feature, nil +} + +func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) { + + var feature []basemodel.Feature + + err := r.db.Select(&feature, + `SELECT * FROM feature_status;`) + if err != nil { + return feature, err + } + + return feature, nil +} + +func (r *Repo) UpdateFeature(req basemodel.Feature) error { + + _, err := r.db.Exec( + `UPDATE feature_status SET active = ?, usage = ?, usage_limit = ?, route = ? WHERE name = ?;`, + req.Active, req.Usage, req.UsageLimit, req.Route, req.Name) + if err != nil { + return err + } + return nil +} + +func (r *Repo) InitFeatures(req basemodel.FeatureSet) error { + // get a feature by name, if it doesn't exist, create it. If it does exist, update it. 
+ for _, feature := range req { + currentFeature, err := r.GetFeature(feature.Name) + if err != nil && err == sql.ErrNoRows { + err := r.CreateFeature(&feature) + if err != nil { + return err + } + continue + } else if err != nil { + return err + } + feature.Usage = currentFeature.Usage + if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 { + feature.Active = false + } + err = r.UpdateFeature(feature) + if err != nil { + return err + } + } + return nil +} diff --git a/signoz/ee/query-service/license/manager.go b/signoz/ee/query-service/license/manager.go new file mode 100644 index 0000000..7488760 --- /dev/null +++ b/signoz/ee/query-service/license/manager.go @@ -0,0 +1,334 @@ +package license + +import ( + "context" + "fmt" + "sync/atomic" + "time" + + "github.com/jmoiron/sqlx" + + "sync" + + "go.signoz.io/signoz/pkg/query-service/auth" + baseconstants "go.signoz.io/signoz/pkg/query-service/constants" + + validate "go.signoz.io/signoz/ee/query-service/integrations/signozio" + "go.signoz.io/signoz/ee/query-service/model" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.signoz.io/signoz/pkg/query-service/telemetry" + "go.uber.org/zap" +) + +var LM *Manager + +// validate and update license every 24 hours +var validationFrequency = 24 * 60 * time.Minute + +type Manager struct { + repo *Repo + mutex sync.Mutex + + validatorRunning bool + + // end the license validation, this is important to gracefully + // stopping validation and protect in-consistent updates + done chan struct{} + + // terminated waits for the validate go routine to end + terminated chan struct{} + + // last time the license was validated + lastValidated int64 + + // keep track of validation failure attempts + failedAttempts uint64 + + // keep track of active license and features + activeLicense *model.License + activeFeatures basemodel.FeatureSet +} + +func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) { + if LM != nil { + 
return LM, nil + } + + repo := NewLicenseRepo(db) + err := repo.InitDB(dbType) + + if err != nil { + return nil, fmt.Errorf("failed to initiate license repo: %v", err) + } + + m := &Manager{ + repo: &repo, + } + + if err := m.start(features...); err != nil { + return m, err + } + LM = m + return m, nil +} + +// start loads active license in memory and initiates validator +func (lm *Manager) start(features ...basemodel.Feature) error { + err := lm.LoadActiveLicense(features...) + + return err +} + +func (lm *Manager) Stop() { + close(lm.done) + <-lm.terminated +} + +func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) { + lm.mutex.Lock() + defer lm.mutex.Unlock() + + if l == nil { + return + } + + lm.activeLicense = l + lm.activeFeatures = append(l.FeatureSet, features...) + // set default features + setDefaultFeatures(lm) + + err := lm.InitFeatures(lm.activeFeatures) + if err != nil { + zap.L().Panic("Couldn't activate features", zap.Error(err)) + } + if !lm.validatorRunning { + // we want to make sure only one validator runs, + // we already have lock() so good to go + lm.validatorRunning = true + go lm.Validator(context.Background()) + } + +} + +func setDefaultFeatures(lm *Manager) { + lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...) +} + +// LoadActiveLicense loads the most recent active license +func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error { + active, err := lm.repo.GetActiveLicense(context.Background()) + if err != nil { + return err + } + if active != nil { + lm.SetActive(active, features...) 
+ } else { + zap.L().Info("No active license found, defaulting to basic plan") + // if no active license is found, we default to basic(free) plan with all default features + lm.activeFeatures = model.BasicPlan + setDefaultFeatures(lm) + err := lm.InitFeatures(lm.activeFeatures) + if err != nil { + zap.L().Error("Couldn't initialize features", zap.Error(err)) + return err + } + } + + return nil +} + +func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) { + + licenses, err := lm.repo.GetLicenses(ctx) + if err != nil { + return nil, model.InternalError(err) + } + + for _, l := range licenses { + l.ParsePlan() + + if l.Key == lm.activeLicense.Key { + l.IsCurrent = true + } + + if l.ValidUntil == -1 { + // for subscriptions, there is no end-date as such + // but for showing user some validity we default one year timespan + l.ValidUntil = l.ValidFrom + 31556926 + } + + response = append(response, l) + } + + return +} + +// Validator validates license after an epoch of time +func (lm *Manager) Validator(ctx context.Context) { + defer close(lm.terminated) + tick := time.NewTicker(validationFrequency) + defer tick.Stop() + + lm.Validate(ctx) + + for { + select { + case <-lm.done: + return + default: + select { + case <-lm.done: + return + case <-tick.C: + lm.Validate(ctx) + } + } + + } +} + +// Validate validates the current active license +func (lm *Manager) Validate(ctx context.Context) (reterr error) { + zap.L().Info("License validation started") + if lm.activeLicense == nil { + return nil + } + + defer func() { + lm.mutex.Lock() + + lm.lastValidated = time.Now().Unix() + if reterr != nil { + zap.L().Error("License validation completed with error", zap.Error(reterr)) + atomic.AddUint64(&lm.failedAttempts, 1) + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, + map[string]interface{}{"err": reterr.Error()}, "", true, false) + } else { + zap.L().Info("License validation completed with no 
errors") + } + + lm.mutex.Unlock() + }() + + response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId) + if apiError != nil { + zap.L().Error("failed to validate license", zap.Error(apiError.Err)) + return apiError.Err + } + + if response.PlanDetails == lm.activeLicense.PlanDetails { + // license plan hasnt changed, nothing to do + return nil + } + + if response.PlanDetails != "" { + + // copy and replace the active license record + l := model.License{ + Key: lm.activeLicense.Key, + CreatedAt: lm.activeLicense.CreatedAt, + PlanDetails: response.PlanDetails, + ValidationMessage: lm.activeLicense.ValidationMessage, + ActivationId: lm.activeLicense.ActivationId, + } + + if err := l.ParsePlan(); err != nil { + zap.L().Error("failed to parse updated license", zap.Error(err)) + return err + } + + // updated plan is parsable, check if plan has changed + if lm.activeLicense.PlanDetails != response.PlanDetails { + err := lm.repo.UpdatePlanDetails(ctx, lm.activeLicense.Key, response.PlanDetails) + if err != nil { + // unexpected db write issue but we can let the user continue + // and wait for update to work in next cycle. 
+ zap.L().Error("failed to validate license", zap.Error(err)) + } + } + + // activate the update license plan + lm.SetActive(&l) + } + + return nil +} + +// Activate activates a license key with signoz server +func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) { + defer func() { + if errResponse != nil { + userEmail, err := auth.GetEmailFromJwt(ctx) + if err == nil { + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, + map[string]interface{}{"err": errResponse.Err.Error()}, userEmail, true, false) + } + } + }() + + response, apiError := validate.ActivateLicense(key, "") + if apiError != nil { + zap.L().Error("failed to activate license", zap.Error(apiError.Err)) + return nil, apiError + } + + l := &model.License{ + Key: key, + ActivationId: response.ActivationId, + PlanDetails: response.PlanDetails, + } + + // parse validity and features from the plan details + err := l.ParsePlan() + + if err != nil { + zap.L().Error("failed to activate license", zap.Error(err)) + return nil, model.InternalError(err) + } + + // store the license before activating it + err = lm.repo.InsertLicense(ctx, l) + if err != nil { + zap.L().Error("failed to activate license", zap.Error(err)) + return nil, model.InternalError(err) + } + + // license is valid, activate it + lm.SetActive(l) + return l, nil +} + +// CheckFeature will be internally used by backend routines +// for feature gating +func (lm *Manager) CheckFeature(featureKey string) error { + feature, err := lm.repo.GetFeature(featureKey) + if err != nil { + return err + } + if feature.Active { + return nil + } + return basemodel.ErrFeatureUnavailable{Key: featureKey} +} + +// GetFeatureFlags returns current active features +func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) { + return lm.repo.GetAllFeatures() +} + +func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error { + return 
lm.repo.InitFeatures(features) +} + +func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error { + return lm.repo.UpdateFeature(feature) +} + +func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) { + return lm.repo.GetFeature(key) +} + +// GetRepo return the license repo +func (lm *Manager) GetRepo() *Repo { + return lm.repo +} diff --git a/signoz/ee/query-service/license/sqlite/init.go b/signoz/ee/query-service/license/sqlite/init.go new file mode 100644 index 0000000..c80bbd5 --- /dev/null +++ b/signoz/ee/query-service/license/sqlite/init.go @@ -0,0 +1,52 @@ +package sqlite + +import ( + "fmt" + + "github.com/jmoiron/sqlx" +) + +func InitDB(db *sqlx.DB) error { + var err error + if db == nil { + return fmt.Errorf("invalid db connection") + } + + table_schema := `CREATE TABLE IF NOT EXISTS licenses( + key TEXT PRIMARY KEY, + createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + planDetails TEXT, + activationId TEXT, + validationMessage TEXT, + lastValidated TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + + CREATE TABLE IF NOT EXISTS sites( + uuid TEXT PRIMARY KEY, + alias VARCHAR(180) DEFAULT 'PROD', + url VARCHAR(300), + createdAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + ` + + _, err = db.Exec(table_schema) + if err != nil { + return fmt.Errorf("error in creating licenses table: %s", err.Error()) + } + + table_schema = `CREATE TABLE IF NOT EXISTS feature_status ( + name TEXT PRIMARY KEY, + active bool, + usage INTEGER DEFAULT 0, + usage_limit INTEGER DEFAULT 0, + route TEXT + );` + + _, err = db.Exec(table_schema) + if err != nil { + return fmt.Errorf("error in creating feature_status table: %s", err.Error()) + } + + return nil +} diff --git a/signoz/ee/query-service/main.go b/signoz/ee/query-service/main.go new file mode 100644 index 0000000..c5a03f4 --- /dev/null +++ b/signoz/ee/query-service/main.go @@ -0,0 +1,179 @@ +package main + +import ( + "context" + "flag" + "log" + "os" + 
"os/signal" + "strconv" + "syscall" + "time" + + "go.opentelemetry.io/otel/sdk/resource" + semconv "go.opentelemetry.io/otel/semconv/v1.4.0" + "go.signoz.io/signoz/ee/query-service/app" + "go.signoz.io/signoz/pkg/query-service/auth" + baseconst "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/migrate" + "go.signoz.io/signoz/pkg/query-service/version" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" + + zapotlpencoder "github.com/SigNoz/zap_otlp/zap_otlp_encoder" + zapotlpsync "github.com/SigNoz/zap_otlp/zap_otlp_sync" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger { + config := zap.NewProductionConfig() + ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt) + defer stop() + + config.EncoderConfig.EncodeDuration = zapcore.MillisDurationEncoder + config.EncoderConfig.EncodeLevel = zapcore.CapitalLevelEncoder + config.EncoderConfig.TimeKey = "timestamp" + config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder + + otlpEncoder := zapotlpencoder.NewOTLPEncoder(config.EncoderConfig) + consoleEncoder := zapcore.NewJSONEncoder(config.EncoderConfig) + defaultLogLevel := zapcore.InfoLevel + + res := resource.NewWithAttributes( + semconv.SchemaURL, + semconv.ServiceNameKey.String("query-service"), + ) + + core := zapcore.NewTee( + zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel), + ) + + if enableQueryServiceLogOTLPExport { + ctx, cancel := context.WithTimeout(ctx, time.Second*30) + defer cancel() + conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials())) + if err != nil { + log.Fatalf("failed to establish connection: %v", err) + } else { + logExportBatchSizeInt, err := strconv.Atoi(baseconst.LogExportBatchSize) + if err != nil { + logExportBatchSizeInt = 512 + } + ws := zapcore.AddSync(zapotlpsync.NewOtlpSyncer(conn, 
zapotlpsync.Options{ + BatchSize: logExportBatchSizeInt, + ResourceSchema: semconv.SchemaURL, + Resource: res, + })) + core = zapcore.NewTee( + zapcore.NewCore(consoleEncoder, os.Stdout, defaultLogLevel), + zapcore.NewCore(otlpEncoder, zapcore.NewMultiWriteSyncer(ws), defaultLogLevel), + ) + } + } + logger := zap.New(core, zap.AddCaller(), zap.AddStacktrace(zapcore.ErrorLevel)) + + return logger +} + +func main() { + var promConfigPath, skipTopLvlOpsPath string + + // disables rule execution but allows change to the rule definition + var disableRules bool + + // the url used to build link in the alert messages in slack and other systems + var ruleRepoURL string + var cluster string + + var cacheConfigPath, fluxInterval string + var enableQueryServiceLogOTLPExport bool + var preferSpanMetrics bool + + var maxIdleConns int + var maxOpenConns int + var dialTimeout time.Duration + var gatewayUrl string + + flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") + flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") + flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") + flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)") + flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)") + flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)") + flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)") + flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)") + flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)") + flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache 
for data in motion)") + flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)") + flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')") + flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)") + + flag.Parse() + + loggerMgr := initZapLog(enableQueryServiceLogOTLPExport) + + zap.ReplaceGlobals(loggerMgr) + defer loggerMgr.Sync() // flushes buffer, if any + + version.PrintVersion() + + serverOptions := &app.ServerOptions{ + HTTPHostPort: baseconst.HTTPHostPort, + PromConfigPath: promConfigPath, + SkipTopLvlOpsPath: skipTopLvlOpsPath, + PreferSpanMetrics: preferSpanMetrics, + PrivateHostPort: baseconst.PrivateHostPort, + DisableRules: disableRules, + RuleRepoURL: ruleRepoURL, + MaxIdleConns: maxIdleConns, + MaxOpenConns: maxOpenConns, + DialTimeout: dialTimeout, + CacheConfigPath: cacheConfigPath, + FluxInterval: fluxInterval, + Cluster: cluster, + GatewayUrl: gatewayUrl, + } + + // Read the jwt secret key + auth.JwtSecret = os.Getenv("SIGNOZ_JWT_SECRET") + + if len(auth.JwtSecret) == 0 { + zap.L().Warn("No JWT secret key is specified.") + } else { + zap.L().Info("JWT secret key set successfully.") + } + + if err := migrate.Migrate(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil { + zap.L().Error("Failed to migrate", zap.Error(err)) + } else { + zap.L().Info("Migration successful") + } + + server, err := app.NewServer(serverOptions) + if err != nil { + zap.L().Fatal("Failed to create server", zap.Error(err)) + } + + if err := server.Start(); err != nil { + zap.L().Fatal("Could not start server", zap.Error(err)) + } + + if err := auth.InitAuthCache(context.Background()); err != nil { + zap.L().Fatal("Failed to initialize auth cache", zap.Error(err)) + } + + signalsChannel := make(chan os.Signal, 1) + signal.Notify(signalsChannel, os.Interrupt, syscall.SIGTERM) + + for { + select { + case status := <-server.HealthCheckStatus(): + 
zap.L().Info("Received HealthCheck status: ", zap.Int("status", int(status))) + case <-signalsChannel: + zap.L().Fatal("Received OS Interrupt Signal ... ") + server.Stop() + } + } +} diff --git a/signoz/ee/query-service/model/auth.go b/signoz/ee/query-service/model/auth.go new file mode 100644 index 0000000..9ad83cb --- /dev/null +++ b/signoz/ee/query-service/model/auth.go @@ -0,0 +1,12 @@ +package model + +import ( + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +// GettableInvitation overrides base object and adds precheck into +// response +type GettableInvitation struct { + *basemodel.InvitationResponseObject + Precheck *basemodel.PrecheckResponse `json:"precheck"` +} diff --git a/signoz/ee/query-service/model/domain.go b/signoz/ee/query-service/model/domain.go new file mode 100644 index 0000000..59a2493 --- /dev/null +++ b/signoz/ee/query-service/model/domain.go @@ -0,0 +1,181 @@ +package model + +import ( + "encoding/json" + "fmt" + "net/url" + "strings" + + "github.com/google/uuid" + "github.com/pkg/errors" + saml2 "github.com/russellhaering/gosaml2" + "go.signoz.io/signoz/ee/query-service/sso" + "go.signoz.io/signoz/ee/query-service/sso/saml" + basemodel "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/zap" +) + +type SSOType string + +const ( + SAML SSOType = "SAML" + GoogleAuth SSOType = "GOOGLE_AUTH" +) + +// OrgDomain identify org owned web domains for auth and other purposes +type OrgDomain struct { + Id uuid.UUID `json:"id"` + Name string `json:"name"` + OrgId string `json:"orgId"` + SsoEnabled bool `json:"ssoEnabled"` + SsoType SSOType `json:"ssoType"` + + SamlConfig *SamlConfig `json:"samlConfig"` + GoogleAuthConfig *GoogleOAuthConfig `json:"googleAuthConfig"` + + Org *basemodel.Organization +} + +func (od *OrgDomain) String() string { + return fmt.Sprintf("[%s]%s-%s ", od.Name, od.Id.String(), od.SsoType) +} + +// Valid is used a pipeline function to check if org domain +// loaded from db is valid +func (od *OrgDomain) 
Valid(err error) error { + if err != nil { + return err + } + + if od.Id == uuid.Nil || od.OrgId == "" { + return fmt.Errorf("both id and orgId are required") + } + + return nil +} + +// ValidNew cheks if the org domain is valid for insertion in db +func (od *OrgDomain) ValidNew() error { + + if od.OrgId == "" { + return fmt.Errorf("orgId is required") + } + + if od.Name == "" { + return fmt.Errorf("name is required") + } + + return nil +} + +// LoadConfig loads config params from json text +func (od *OrgDomain) LoadConfig(jsondata string) error { + d := *od + err := json.Unmarshal([]byte(jsondata), &d) + if err != nil { + return errors.Wrap(err, "failed to marshal json to OrgDomain{}") + } + *od = d + return nil +} + +func (od *OrgDomain) GetSAMLEntityID() string { + if od.SamlConfig != nil { + return od.SamlConfig.SamlEntity + } + return "" +} + +func (od *OrgDomain) GetSAMLIdpURL() string { + if od.SamlConfig != nil { + return od.SamlConfig.SamlIdp + } + return "" +} + +func (od *OrgDomain) GetSAMLCert() string { + if od.SamlConfig != nil { + return od.SamlConfig.SamlCert + } + return "" +} + +// PrepareGoogleOAuthProvider creates GoogleProvider that is used in +// requesting OAuth and also used in processing response from google +func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) { + if od.GoogleAuthConfig == nil { + return nil, fmt.Errorf("GOOGLE OAUTH is not setup correctly for this domain") + } + + return od.GoogleAuthConfig.GetProvider(od.Name, siteUrl) +} + +// PrepareSamlRequest creates a request accordingly gosaml2 +func (od *OrgDomain) PrepareSamlRequest(siteUrl *url.URL) (*saml2.SAMLServiceProvider, error) { + + // this is the url Idp will call after login completion + acs := fmt.Sprintf("%s://%s/%s", + siteUrl.Scheme, + siteUrl.Host, + "api/v1/complete/saml") + + // this is the address of the calling url, useful to redirect user + sourceUrl := fmt.Sprintf("%s://%s%s", + siteUrl.Scheme, + siteUrl.Host, 
+ siteUrl.Path) + + // ideally this should be some unique ID for each installation + // but since we dont have UI to support it, we default it to + // host. this issuer is an identifier of service provider (signoz) + // on id provider (e.g. azure, okta). Azure requires this id to be configured + // in their system, while others seem to not care about it. + // currently we default it to host from window.location (received from browser) + issuer := siteUrl.Host + + return saml.PrepareRequest(issuer, acs, sourceUrl, od.GetSAMLEntityID(), od.GetSAMLIdpURL(), od.GetSAMLCert()) +} + +func (od *OrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err error) { + + fmtDomainId := strings.Replace(od.Id.String(), "-", ":", -1) + + // build redirect url from window.location sent by frontend + redirectURL := fmt.Sprintf("%s://%s%s", siteUrl.Scheme, siteUrl.Host, siteUrl.Path) + + // prepare state that gets relayed back when the auth provider + // calls back our url. here we pass the app url (where signoz runs) + // and the domain Id. The domain Id helps in identifying sso config + // when the call back occurs and the app url is useful in redirecting user + // back to the right path. + // why do we need to pass app url? the callback typically is handled by backend + // and sometimes backend might right at a different port or is unaware of frontend + // endpoint (unless SITE_URL param is set). hence, we receive this build sso request + // along with frontend window.location and use it to relay the information through + // auth provider to the backend (HandleCallback or HandleSSO method). 
+ relayState := fmt.Sprintf("%s?domainId=%s", redirectURL, fmtDomainId) + + switch od.SsoType { + case SAML: + + sp, err := od.PrepareSamlRequest(siteUrl) + if err != nil { + return "", err + } + + return sp.BuildAuthURL(relayState) + + case GoogleAuth: + + googleProvider, err := od.PrepareGoogleOAuthProvider(siteUrl) + if err != nil { + return "", err + } + return googleProvider.BuildAuthURL(relayState) + + default: + zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) + return "", fmt.Errorf("unsupported SSO config for the domain") + } + +} diff --git a/signoz/ee/query-service/model/errors.go b/signoz/ee/query-service/model/errors.go new file mode 100644 index 0000000..7e7b841 --- /dev/null +++ b/signoz/ee/query-service/model/errors.go @@ -0,0 +1,109 @@ +package model + +import ( + "fmt" + + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +type ApiError struct { + Typ basemodel.ErrorType + Err error +} + +func (a *ApiError) Type() basemodel.ErrorType { + return a.Typ +} + +func (a *ApiError) ToError() error { + if a != nil { + return a.Err + } + return a.Err +} + +func (a *ApiError) Error() string { + return a.Err.Error() +} + +func (a *ApiError) IsNil() bool { + return a == nil || a.Err == nil +} + +// NewApiError returns a ApiError object of given type +func NewApiError(typ basemodel.ErrorType, err error) *ApiError { + return &ApiError{ + Typ: typ, + Err: err, + } +} + +// BadRequest returns a ApiError object of bad request +func BadRequest(err error) *ApiError { + return &ApiError{ + Typ: basemodel.ErrorBadData, + Err: err, + } +} + +// BadRequestStr returns a ApiError object of bad request for string input +func BadRequestStr(s string) *ApiError { + return &ApiError{ + Typ: basemodel.ErrorBadData, + Err: fmt.Errorf(s), + } +} + +// InternalError returns a ApiError object of internal type +func InternalError(err error) *ApiError { + return &ApiError{ + Typ: basemodel.ErrorInternal, + Err: err, + } +} 
+ +// InternalErrorStr returns a ApiError object of internal type for string input +func InternalErrorStr(s string) *ApiError { + return &ApiError{ + Typ: basemodel.ErrorInternal, + Err: fmt.Errorf(s), + } +} + +var ( + ErrorNone basemodel.ErrorType = "" + ErrorTimeout basemodel.ErrorType = "timeout" + ErrorCanceled basemodel.ErrorType = "canceled" + ErrorExec basemodel.ErrorType = "execution" + ErrorBadData basemodel.ErrorType = "bad_data" + ErrorInternal basemodel.ErrorType = "internal" + ErrorUnavailable basemodel.ErrorType = "unavailable" + ErrorNotFound basemodel.ErrorType = "not_found" + ErrorNotImplemented basemodel.ErrorType = "not_implemented" + ErrorUnauthorized basemodel.ErrorType = "unauthorized" + ErrorForbidden basemodel.ErrorType = "forbidden" + ErrorConflict basemodel.ErrorType = "conflict" + ErrorStreamingNotSupported basemodel.ErrorType = "streaming is not supported" +) + +func init() { + ErrorNone = basemodel.ErrorNone + ErrorTimeout = basemodel.ErrorTimeout + ErrorCanceled = basemodel.ErrorCanceled + ErrorExec = basemodel.ErrorExec + ErrorBadData = basemodel.ErrorBadData + ErrorInternal = basemodel.ErrorInternal + ErrorUnavailable = basemodel.ErrorUnavailable + ErrorNotFound = basemodel.ErrorNotFound + ErrorNotImplemented = basemodel.ErrorNotImplemented + ErrorUnauthorized = basemodel.ErrorUnauthorized + ErrorForbidden = basemodel.ErrorForbidden + ErrorConflict = basemodel.ErrorConflict + ErrorStreamingNotSupported = basemodel.ErrorStreamingNotSupported +} + +type ErrUnsupportedAuth struct{} + +func (errUnsupportedAuth ErrUnsupportedAuth) Error() string { + return "this authentication method not supported" +} diff --git a/signoz/ee/query-service/model/license.go b/signoz/ee/query-service/model/license.go new file mode 100644 index 0000000..7ad349c --- /dev/null +++ b/signoz/ee/query-service/model/license.go @@ -0,0 +1,106 @@ +package model + +import ( + "encoding/base64" + "encoding/json" + "time" + + "github.com/pkg/errors" + basemodel 
"go.signoz.io/signoz/pkg/query-service/model" +) + +type License struct { + Key string `json:"key" db:"key"` + ActivationId string `json:"activationId" db:"activationId"` + CreatedAt time.Time `db:"created_at"` + + // PlanDetails contains the encrypted plan info + PlanDetails string `json:"planDetails" db:"planDetails"` + + // stores parsed license details + LicensePlan + + FeatureSet basemodel.FeatureSet + + // populated in case license has any errors + ValidationMessage string `db:"validationMessage"` + + // used only for sending details to front-end + IsCurrent bool `json:"isCurrent"` +} + +func (l *License) MarshalJSON() ([]byte, error) { + + return json.Marshal(&struct { + Key string `json:"key" db:"key"` + ActivationId string `json:"activationId" db:"activationId"` + ValidationMessage string `db:"validationMessage"` + IsCurrent bool `json:"isCurrent"` + PlanKey string `json:"planKey"` + ValidFrom time.Time `json:"ValidFrom"` + ValidUntil time.Time `json:"ValidUntil"` + Status string `json:"status"` + }{ + Key: l.Key, + ActivationId: l.ActivationId, + IsCurrent: l.IsCurrent, + PlanKey: l.PlanKey, + ValidFrom: time.Unix(l.ValidFrom, 0), + ValidUntil: time.Unix(l.ValidUntil, 0), + Status: l.Status, + ValidationMessage: l.ValidationMessage, + }) +} + +type LicensePlan struct { + PlanKey string `json:"planKey"` + ValidFrom int64 `json:"validFrom"` + ValidUntil int64 `json:"validUntil"` + Status string `json:"status"` +} + +func (l *License) ParsePlan() error { + l.LicensePlan = LicensePlan{} + + planData, err := base64.StdEncoding.DecodeString(l.PlanDetails) + if err != nil { + return err + } + + plan := LicensePlan{} + err = json.Unmarshal([]byte(planData), &plan) + if err != nil { + l.ValidationMessage = "failed to parse plan from license" + return errors.Wrap(err, "failed to parse plan from license") + } + + l.LicensePlan = plan + l.ParseFeatures() + return nil +} + +func (l *License) ParseFeatures() { + switch l.PlanKey { + case Pro: + l.FeatureSet = ProPlan + 
case Enterprise: + l.FeatureSet = EnterprisePlan + default: + l.FeatureSet = BasicPlan + } +} + +type Licenses struct { + TrialStart int64 `json:"trialStart"` + TrialEnd int64 `json:"trialEnd"` + OnTrial bool `json:"onTrial"` + WorkSpaceBlock bool `json:"workSpaceBlock"` + TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"` + GracePeriodEnd int64 `json:"gracePeriodEnd"` + Licenses []License `json:"licenses"` +} + +type SubscriptionServerResp struct { + Status string `json:"status"` + Data Licenses `json:"data"` +} diff --git a/signoz/ee/query-service/model/pat.go b/signoz/ee/query-service/model/pat.go new file mode 100644 index 0000000..ef683a0 --- /dev/null +++ b/signoz/ee/query-service/model/pat.go @@ -0,0 +1,32 @@ +package model + +type User struct { + Id string `json:"id" db:"id"` + Name string `json:"name" db:"name"` + Email string `json:"email" db:"email"` + CreatedAt int64 `json:"createdAt" db:"created_at"` + ProfilePictureURL string `json:"profilePictureURL" db:"profile_picture_url"` + NotFound bool `json:"notFound"` +} + +type CreatePATRequestBody struct { + Name string `json:"name"` + Role string `json:"role"` + ExpiresInDays int64 `json:"expiresInDays"` +} + +type PAT struct { + Id string `json:"id" db:"id"` + UserID string `json:"userId" db:"user_id"` + CreatedByUser User `json:"createdByUser"` + UpdatedByUser User `json:"updatedByUser"` + Token string `json:"token" db:"token"` + Role string `json:"role" db:"role"` + Name string `json:"name" db:"name"` + CreatedAt int64 `json:"createdAt" db:"created_at"` + ExpiresAt int64 `json:"expiresAt" db:"expires_at"` + UpdatedAt int64 `json:"updatedAt" db:"updated_at"` + LastUsed int64 `json:"lastUsed" db:"last_used"` + Revoked bool `json:"revoked" db:"revoked"` + UpdatedByUserID string `json:"updatedByUserId" db:"updated_by_user_id"` +} diff --git a/signoz/ee/query-service/model/plans.go b/signoz/ee/query-service/model/plans.go new file mode 100644 index 0000000..135d276 --- /dev/null +++ 
b/signoz/ee/query-service/model/plans.go @@ -0,0 +1,338 @@ +package model + +import ( + basemodel "go.signoz.io/signoz/pkg/query-service/model" +) + +const SSO = "SSO" +const Basic = "BASIC_PLAN" +const Pro = "PRO_PLAN" +const Enterprise = "ENTERPRISE_PLAN" +const DisableUpsell = "DISABLE_UPSELL" +const Onboarding = "ONBOARDING" +const ChatSupport = "CHAT_SUPPORT" +const Gateway = "GATEWAY" + +var BasicPlan = basemodel.FeatureSet{ + basemodel.Feature{ + Name: SSO, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.OSS, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: DisableUpsell, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.SmartTraceDetail, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.CustomMetricsFunction, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderPanels, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderAlerts, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelSlack, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelWebhook, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelPagerduty, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelOpsgenie, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelMsTeams, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + 
basemodel.Feature{ + Name: basemodel.UseSpanMetrics, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: Gateway, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, +} + +var ProPlan = basemodel.FeatureSet{ + basemodel.Feature{ + Name: SSO, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.OSS, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.SmartTraceDetail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.CustomMetricsFunction, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderPanels, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderAlerts, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelSlack, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelWebhook, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelPagerduty, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelOpsgenie, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelMsTeams, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.UseSpanMetrics, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: Gateway, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, +} + +var EnterprisePlan = basemodel.FeatureSet{ + basemodel.Feature{ + 
Name: SSO, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.OSS, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.SmartTraceDetail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.CustomMetricsFunction, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderPanels, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.QueryBuilderAlerts, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelSlack, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelWebhook, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelPagerduty, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelOpsgenie, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelEmail, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.AlertChannelMsTeams, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: basemodel.UseSpanMetrics, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: Onboarding, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: ChatSupport, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, + basemodel.Feature{ + Name: Gateway, + Active: true, + Usage: 0, + UsageLimit: -1, + Route: "", + }, +} diff --git a/signoz/ee/query-service/model/sso.go b/signoz/ee/query-service/model/sso.go new file mode 100644 index 0000000..8e8e847 --- 
/dev/null +++ b/signoz/ee/query-service/model/sso.go @@ -0,0 +1,68 @@ +package model + +import ( + "fmt" + "context" + "net/url" + "golang.org/x/oauth2" + "github.com/coreos/go-oidc/v3/oidc" + "go.signoz.io/signoz/ee/query-service/sso" +) + +// SamlConfig contans SAML params to generate and respond to the requests +// from SAML provider +type SamlConfig struct { + SamlEntity string `json:"samlEntity"` + SamlIdp string `json:"samlIdp"` + SamlCert string `json:"samlCert"` +} + +// GoogleOauthConfig contains a generic config to support oauth +type GoogleOAuthConfig struct { + ClientID string `json:"clientId"` + ClientSecret string `json:"clientSecret"` + RedirectURI string `json:"redirectURI"` +} + + +const ( + googleIssuerURL = "https://accounts.google.com" +) + +func (g *GoogleOAuthConfig) GetProvider(domain string, siteUrl *url.URL) (sso.OAuthCallbackProvider, error) { + + ctx, cancel := context.WithCancel(context.Background()) + + provider, err := oidc.NewProvider(ctx, googleIssuerURL) + if err != nil { + cancel() + return nil, fmt.Errorf("failed to get provider: %v", err) + } + + // default to email and profile scope as we just use google auth + // to verify identity and start a session. 
+ scopes := []string{"email"} + + // this is the url google will call after login completion + redirectURL := fmt.Sprintf("%s://%s/%s", + siteUrl.Scheme, + siteUrl.Host, + "api/v1/complete/google") + + return &sso.GoogleOAuthProvider{ + RedirectURI: g.RedirectURI, + OAuth2Config: &oauth2.Config{ + ClientID: g.ClientID, + ClientSecret: g.ClientSecret, + Endpoint: provider.Endpoint(), + Scopes: scopes, + RedirectURL: redirectURL, + }, + Verifier: provider.Verifier( + &oidc.Config{ClientID: g.ClientID}, + ), + Cancel: cancel, + HostedDomain: domain, + }, nil +} + diff --git a/signoz/ee/query-service/model/trace.go b/signoz/ee/query-service/model/trace.go new file mode 100644 index 0000000..708d6d1 --- /dev/null +++ b/signoz/ee/query-service/model/trace.go @@ -0,0 +1,22 @@ +package model + +type SpanForTraceDetails struct { + TimeUnixNano uint64 `json:"timestamp"` + SpanID string `json:"spanID"` + TraceID string `json:"traceID"` + ParentID string `json:"parentID"` + ParentSpan *SpanForTraceDetails `json:"parentSpan"` + ServiceName string `json:"serviceName"` + Name string `json:"name"` + Kind int32 `json:"kind"` + DurationNano int64 `json:"durationNano"` + TagMap map[string]string `json:"tagMap"` + Events []string `json:"event"` + HasError bool `json:"hasError"` + Children []*SpanForTraceDetails `json:"children"` +} + +type GetSpansSubQueryDBResponse struct { + SpanID string `ch:"spanID"` + TraceID string `ch:"traceID"` +} diff --git a/signoz/ee/query-service/model/usage.go b/signoz/ee/query-service/model/usage.go new file mode 100644 index 0000000..3cedb05 --- /dev/null +++ b/signoz/ee/query-service/model/usage.go @@ -0,0 +1,34 @@ +package model + +import ( + "time" + + "github.com/google/uuid" +) + +type UsagePayload struct { + InstallationId uuid.UUID `json:"installationId"` + LicenseKey uuid.UUID `json:"licenseKey"` + Usage []Usage `json:"usage"` +} + +type Usage struct { + CollectorID string `json:"collectorId"` + ExporterID string `json:"exporterId"` + Type 
string `json:"type"` + Tenant string `json:"tenant"` + TimeStamp time.Time `json:"timestamp"` + Count int64 `json:"count"` + Size int64 `json:"size"` + OrgName string `json:"orgName"` + TenantId string `json:"tenantId"` +} + +type UsageDB struct { + CollectorID string `ch:"collector_id" json:"collectorId"` + ExporterID string `ch:"exporter_id" json:"exporterId"` + Type string `ch:"-" json:"type"` + TimeStamp time.Time `ch:"timestamp" json:"timestamp"` + Tenant string `ch:"tenant" json:"tenant"` + Data string `ch:"data" json:"data"` +} diff --git a/signoz/ee/query-service/sso/google.go b/signoz/ee/query-service/sso/google.go new file mode 100644 index 0000000..a27a38e --- /dev/null +++ b/signoz/ee/query-service/sso/google.go @@ -0,0 +1,92 @@ +package sso + +import ( + "fmt" + "errors" + "context" + "net/http" + "github.com/coreos/go-oidc/v3/oidc" + "golang.org/x/oauth2" +) + +type GoogleOAuthProvider struct { + RedirectURI string + OAuth2Config *oauth2.Config + Verifier *oidc.IDTokenVerifier + Cancel context.CancelFunc + HostedDomain string +} + + +func (g *GoogleOAuthProvider) BuildAuthURL(state string) (string, error) { + var opts []oauth2.AuthCodeOption + + // set hosted domain. google supports multiple hosted domains but in our case + // we have one config per host domain. 
+ opts = append(opts, oauth2.SetAuthURLParam("hd", g.HostedDomain)) + + return g.OAuth2Config.AuthCodeURL(state, opts...), nil +} + +type oauth2Error struct{ + error string + errorDescription string +} + +func (e *oauth2Error) Error() string { + if e.errorDescription == "" { + return e.error + } + return e.error + ": " + e.errorDescription +} + +func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIdentity, err error) { + q := r.URL.Query() + if errType := q.Get("error"); errType != "" { + return identity, &oauth2Error{errType, q.Get("error_description")} + } + + token, err := g.OAuth2Config.Exchange(r.Context(), q.Get("code")) + if err != nil { + return identity, fmt.Errorf("google: failed to get token: %v", err) + } + + return g.createIdentity(r.Context(), token) +} + + +func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) { + rawIDToken, ok := token.Extra("id_token").(string) + if !ok { + return identity, errors.New("google: no id_token in token response") + } + idToken, err := g.Verifier.Verify(ctx, rawIDToken) + if err != nil { + return identity, fmt.Errorf("google: failed to verify ID Token: %v", err) + } + + var claims struct { + Username string `json:"name"` + Email string `json:"email"` + EmailVerified bool `json:"email_verified"` + HostedDomain string `json:"hd"` + } + if err := idToken.Claims(&claims); err != nil { + return identity, fmt.Errorf("oidc: failed to decode claims: %v", err) + } + + if claims.HostedDomain != g.HostedDomain { + return identity, fmt.Errorf("oidc: unexpected hd claim %v", claims.HostedDomain) + } + + identity = &SSOIdentity{ + UserID: idToken.Subject, + Username: claims.Username, + Email: claims.Email, + EmailVerified: claims.EmailVerified, + ConnectorData: []byte(token.RefreshToken), + } + + return identity, nil +} + diff --git a/signoz/ee/query-service/sso/model.go b/signoz/ee/query-service/sso/model.go new file mode 100644 index 
0000000..3e5f103 --- /dev/null +++ b/signoz/ee/query-service/sso/model.go @@ -0,0 +1,31 @@ +package sso + +import ( + "net/http" +) + +// SSOIdentity contains details of user received from SSO provider +type SSOIdentity struct { + UserID string + Username string + PreferredUsername string + Email string + EmailVerified bool + ConnectorData []byte +} + +// OAuthCallbackProvider is an interface implemented by connectors which use an OAuth +// style redirect flow to determine user information. +type OAuthCallbackProvider interface { + // The initial URL user would be redirect to. + // OAuth2 implementations support various scopes but we only need profile and user as + // the roles are still being managed in SigNoz. + BuildAuthURL(state string) (string, error) + + // Handle the callback to the server (after login at oauth provider site) + // and return a email identity. + // At the moment we dont support auto signup flow (based on domain), so + // the full identity (including name, group etc) is not required outside of the + // connector + HandleCallback(r *http.Request) (identity *SSOIdentity, err error) +} diff --git a/signoz/ee/query-service/sso/saml/request.go b/signoz/ee/query-service/sso/saml/request.go new file mode 100644 index 0000000..c9788d0 --- /dev/null +++ b/signoz/ee/query-service/sso/saml/request.go @@ -0,0 +1,107 @@ +package saml + +import ( + "crypto/x509" + "encoding/base64" + "encoding/pem" + "fmt" + "strings" + + saml2 "github.com/russellhaering/gosaml2" + dsig "github.com/russellhaering/goxmldsig" + "go.signoz.io/signoz/pkg/query-service/constants" + "go.uber.org/zap" +) + +func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) { + certStore := &dsig.MemoryX509CertificateStore{ + Roots: []*x509.Certificate{}, + } + + certData, err := base64.StdEncoding.DecodeString(certString) + if err != nil { + return certStore, fmt.Errorf(fmt.Sprintf("failed to read certificate: %v", err)) + } + + idpCert, err := 
x509.ParseCertificate(certData) + if err != nil { + return certStore, fmt.Errorf(fmt.Sprintf("failed to prepare saml request, invalid cert: %s", err.Error())) + } + + certStore.Roots = append(certStore.Roots, idpCert) + + return certStore, nil +} + +func LoadCertFromPem(certString string) (dsig.X509CertificateStore, error) { + certStore := &dsig.MemoryX509CertificateStore{ + Roots: []*x509.Certificate{}, + } + + block, _ := pem.Decode([]byte(certString)) + if block == nil { + return certStore, fmt.Errorf("no valid pem cert found") + } + + idpCert, err := x509.ParseCertificate(block.Bytes) + if err != nil { + return certStore, fmt.Errorf(fmt.Sprintf("failed to parse pem cert: %s", err.Error())) + } + + certStore.Roots = append(certStore.Roots, idpCert) + + return certStore, nil +} + +// PrepareRequest prepares authorization URL (Idp Provider URL) +func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (*saml2.SAMLServiceProvider, error) { + var certStore dsig.X509CertificateStore + if certString == "" { + return nil, fmt.Errorf("invalid certificate data") + } + + var err error + if strings.Contains(certString, "-----BEGIN CERTIFICATE-----") { + certStore, err = LoadCertFromPem(certString) + } else { + certStore, err = LoadCertificateStore(certString) + } + // certificate store can not be created, throw error + if err != nil { + return nil, err + } + + randomKeyStore := dsig.RandomKeyStoreForTest() + + // SIGNOZ_SAML_RETURN_URL env var would support overriding window.location + // as return destination after saml request is complete from IdP side. + // this var is also useful for development, as it is easy to override with backend endpoint + // e.g. 
http://localhost:8080/api/v1/complete/saml + acsUrl = constants.GetOrDefaultEnv("SIGNOZ_SAML_RETURN_URL", acsUrl) + + sp := &saml2.SAMLServiceProvider{ + IdentityProviderSSOURL: idp, + IdentityProviderIssuer: entity, + ServiceProviderIssuer: issuer, + AssertionConsumerServiceURL: acsUrl, + SignAuthnRequests: true, + AllowMissingAttributes: true, + + // about cert stores -sender(signoz app) and receiver (idp) + // The random key (random key store) is sender cert. The public cert store(IDPCertificateStore) that you see on org domain is receiver cert (idp provided). + // At the moment, the library we use doesn't bother about sender cert and IdP too. It just adds additional layer of security, which we can explore in future versions + // The receiver (Idp) cert will be different for each org domain. Imagine cloud setup where each company setups their domain that integrates with their Idp. + // @signoz.io + // @next.io + // Each of above will have their own Idp setup and hence separate public cert to decrypt the response. + // The way SAML request travels is - + // SigNoz Backend -> IdP Login Screen -> SigNoz Backend -> SigNoz Frontend + // ---------------- | -------------------| ------------------------------------- + // The dotted lines indicate request boundries. So if you notice, the response from Idp starts a new request. hence we need relay state to pass the context around. 
+ + IDPCertificateStore: certStore, + SPKeyStore: randomKeyStore, + } + zap.L().Debug("SAML request", zap.Any("sp", sp)) + return sp, nil +} diff --git a/signoz/ee/query-service/usage/manager.go b/signoz/ee/query-service/usage/manager.go new file mode 100644 index 0000000..d52d5ad --- /dev/null +++ b/signoz/ee/query-service/usage/manager.go @@ -0,0 +1,209 @@ +package usage + +import ( + "context" + "encoding/json" + "fmt" + "os" + "regexp" + "strings" + "sync/atomic" + "time" + + "github.com/ClickHouse/clickhouse-go/v2" + "github.com/go-co-op/gocron" + "github.com/google/uuid" + + "go.uber.org/zap" + + "go.signoz.io/signoz/ee/query-service/dao" + licenseserver "go.signoz.io/signoz/ee/query-service/integrations/signozio" + "go.signoz.io/signoz/ee/query-service/license" + "go.signoz.io/signoz/ee/query-service/model" + "go.signoz.io/signoz/pkg/query-service/utils/encryption" +) + +const ( + MaxRetries = 3 + RetryInterval = 5 * time.Second + stateUnlocked uint32 = 0 + stateLocked uint32 = 1 +) + +var ( + locker = stateUnlocked +) + +type Manager struct { + clickhouseConn clickhouse.Conn + + licenseRepo *license.Repo + + scheduler *gocron.Scheduler + + modelDao dao.ModelDao + + tenantID string +} + +func New(dbType string, modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn) (*Manager, error) { + hostNameRegex := regexp.MustCompile(`tcp://(?P.*):`) + hostNameRegexMatches := hostNameRegex.FindStringSubmatch(os.Getenv("ClickHouseUrl")) + + tenantID := "" + if len(hostNameRegexMatches) == 2 { + tenantID = hostNameRegexMatches[1] + tenantID = strings.TrimSuffix(tenantID, "-clickhouse") + } + + m := &Manager{ + // repository: repo, + clickhouseConn: clickhouseConn, + licenseRepo: licenseRepo, + scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC + modelDao: modelDao, + tenantID: tenantID, + } + return m, nil +} + +// start loads collects and exports any exported snapshot and starts the exporter 
+func (lm *Manager) Start() error { + // compares the locker and stateUnlocked if both are same lock is applied else returns error + if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) { + return fmt.Errorf("usage exporter is locked") + } + + _, err := lm.scheduler.Do(func() { lm.UploadUsage() }) + if err != nil { + return err + } + + // upload usage once when starting the service + lm.UploadUsage() + + lm.scheduler.StartAsync() + + return nil +} +func (lm *Manager) UploadUsage() { + ctx := context.Background() + // check if license is present or not + license, err := lm.licenseRepo.GetActiveLicense(ctx) + if err != nil { + zap.L().Error("failed to get active license", zap.Error(err)) + return + } + if license == nil { + // we will not start the usage reporting if license is not present. + zap.L().Info("no license present, skipping usage reporting") + return + } + + usages := []model.UsageDB{} + + // get usage from clickhouse + dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"} + query := ` + SELECT tenant, collector_id, exporter_id, timestamp, data + FROM %s.distributed_usage as u1 + GLOBAL INNER JOIN + (SELECT + tenant, collector_id, exporter_id, MAX(timestamp) as ts + FROM %s.distributed_usage as u2 + where timestamp >= $1 + GROUP BY tenant, collector_id, exporter_id + ) as t1 + ON + u1.tenant = t1.tenant AND u1.collector_id = t1.collector_id AND u1.exporter_id = t1.exporter_id and u1.timestamp = t1.ts + order by timestamp + ` + + for _, db := range dbs { + dbusages := []model.UsageDB{} + err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) + if err != nil && !strings.Contains(err.Error(), "doesn't exist") { + zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) + return + } + for _, u := range dbusages { + u.Type = db + usages = append(usages, u) + } + } + + if len(usages) <= 0 { + zap.L().Info("no snapshots to upload, skipping.") + return + } + + 
zap.L().Info("uploading usage data") + + orgName := "" + orgNames, orgError := lm.modelDao.GetOrgs(ctx) + if orgError != nil { + zap.L().Error("failed to get org data: %v", zap.Error(orgError)) + } + if len(orgNames) == 1 { + orgName = orgNames[0].Name + } + + usagesPayload := []model.Usage{} + for _, usage := range usages { + usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data)) + if err != nil { + zap.L().Error("error while decrypting usage data: %v", zap.Error(err)) + return + } + + usageData := model.Usage{} + err = json.Unmarshal(usageDataBytes, &usageData) + if err != nil { + zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err)) + return + } + + usageData.CollectorID = usage.CollectorID + usageData.ExporterID = usage.ExporterID + usageData.Type = usage.Type + usageData.Tenant = usage.Tenant + usageData.OrgName = orgName + usageData.TenantId = lm.tenantID + usagesPayload = append(usagesPayload, usageData) + } + + key, _ := uuid.Parse(license.Key) + payload := model.UsagePayload{ + LicenseKey: key, + Usage: usagesPayload, + } + lm.UploadUsageWithExponentalBackOff(ctx, payload) +} + +func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload model.UsagePayload) { + for i := 1; i <= MaxRetries; i++ { + apiErr := licenseserver.SendUsage(ctx, payload) + if apiErr != nil && i == MaxRetries { + zap.L().Error("retries stopped : %v", zap.Error(apiErr)) + // not returning error here since it is captured in the failed count + return + } else if apiErr != nil { + // sleeping for exponential backoff + sleepDuration := RetryInterval * time.Duration(i) + zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err)) + time.Sleep(sleepDuration) + } else { + break + } + } +} + +func (lm *Manager) Stop() { + lm.scheduler.Stop() + + zap.L().Info("sending usage data before shutting down") + // send usage before shutting down 
+ lm.UploadUsage() + + atomic.StoreUint32(&locker, stateUnlocked) +} diff --git a/signoz/frontend/.babelrc b/signoz/frontend/.babelrc new file mode 100644 index 0000000..9efe6ca --- /dev/null +++ b/signoz/frontend/.babelrc @@ -0,0 +1,16 @@ +{ + "presets": [ + "@babel/preset-env", + ["@babel/preset-react", { "runtime": "automatic" }], + "@babel/preset-typescript" + ], + "plugins": [ + "react-hot-loader/babel", + "@babel/plugin-proposal-class-properties" + ], + "env": { + "production": { + "presets": ["minify"] + } + } +} diff --git a/signoz/frontend/.dockerignore b/signoz/frontend/.dockerignore new file mode 100644 index 0000000..840adcb --- /dev/null +++ b/signoz/frontend/.dockerignore @@ -0,0 +1,3 @@ +node_modules +.vscode +.git diff --git a/signoz/frontend/.eslintignore b/signoz/frontend/.eslintignore new file mode 100644 index 0000000..402f7ae --- /dev/null +++ b/signoz/frontend/.eslintignore @@ -0,0 +1,4 @@ +node_modules +build +*.typegen.ts +i18-generate-hash.js \ No newline at end of file diff --git a/signoz/frontend/.eslintrc.js b/signoz/frontend/.eslintrc.js new file mode 100644 index 0000000..5034915 --- /dev/null +++ b/signoz/frontend/.eslintrc.js @@ -0,0 +1,121 @@ +module.exports = { + env: { + browser: true, + es2021: true, + node: true, + 'jest/globals': true, + }, + extends: [ + 'airbnb', + 'airbnb-typescript', + 'eslint:recommended', + 'plugin:react/recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:@typescript-eslint/eslint-recommended', + 'plugin:prettier/recommended', + 'plugin:sonarjs/recommended', + 'plugin:import/errors', + 'plugin:import/warnings', + 'plugin:react/jsx-runtime', + ], + parser: '@typescript-eslint/parser', + parserOptions: { + project: './tsconfig.json', + ecmaFeatures: { + jsx: true, + }, + ecmaVersion: 12, + sourceType: 'module', + }, + plugins: [ + 'react', + '@typescript-eslint', + 'simple-import-sort', + 'react-hooks', + 'prettier', + 'jest', + ], + settings: { + react: { + version: 'detect', + }, + 
'import/resolver': { + node: { + paths: ['src'], + extensions: ['.js', '.jsx', '.ts', '.tsx'], + }, + }, + }, + rules: { + 'react/jsx-filename-extension': [ + 'error', + { + extensions: ['.tsx', '.js', '.jsx'], + }, + ], + 'react/prop-types': 'off', + '@typescript-eslint/explicit-function-return-type': 'error', + '@typescript-eslint/no-var-requires': 'error', + 'react/no-array-index-key': 'error', + 'linebreak-style': [ + 'error', + process.env.platform === 'win32' ? 'windows' : 'unix', + ], + '@typescript-eslint/default-param-last': 'off', + + // simple sort error + 'simple-import-sort/imports': 'error', + 'simple-import-sort/exports': 'error', + + // hooks + 'react-hooks/rules-of-hooks': 'error', + 'react-hooks/exhaustive-deps': 'error', + + // airbnb + 'no-underscore-dangle': 'off', + 'no-console': 'off', + 'import/prefer-default-export': 'off', + 'import/extensions': [ + 'error', + 'ignorePackages', + { + js: 'never', + jsx: 'never', + ts: 'never', + tsx: 'never', + }, + ], + 'import/no-extraneous-dependencies': ['error', { devDependencies: true }], + 'no-plusplus': 'off', + 'jsx-a11y/label-has-associated-control': [ + 'error', + { + required: { + some: ['nesting', 'id'], + }, + }, + ], + 'jsx-a11y/label-has-for': [ + 'error', + { + required: { + some: ['nesting', 'id'], + }, + }, + ], + '@typescript-eslint/no-unused-vars': 'error', + 'func-style': ['error', 'declaration', { allowArrowFunctions: true }], + 'arrow-body-style': ['error', 'as-needed'], + + // eslint rules need to remove + '@typescript-eslint/no-shadow': 'off', + 'import/no-cycle': 'off', + 'prettier/prettier': [ + 'error', + {}, + { + usePrettierrc: true, + }, + ], + }, +}; diff --git a/signoz/frontend/.gitignore b/signoz/frontend/.gitignore new file mode 100644 index 0000000..7d7c7a5 --- /dev/null +++ b/signoz/frontend/.gitignore @@ -0,0 +1,3 @@ + +# Sentry Config File +.env.sentry-build-plugin diff --git a/signoz/frontend/.husky/commit-msg b/signoz/frontend/.husky/commit-msg new file mode 100755 
index 0000000..cb50d87 --- /dev/null +++ b/signoz/frontend/.husky/commit-msg @@ -0,0 +1,20 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +cd frontend && yarn run commitlint --edit $1 + +branch="$(git rev-parse --abbrev-ref HEAD)" + +color_red="$(tput setaf 1)" +bold="$(tput bold)" +reset="$(tput sgr0)" + +if [ "$branch" = "main" ]; then + echo "${color_red}${bold}You can't commit directly to the main branch${reset}" + exit 1 +fi + +if [ "$branch" = "develop" ]; then + echo "${color_red}${bold}You can't commit directly to the develop branch${reset}" + exit 1 +fi \ No newline at end of file diff --git a/signoz/frontend/.husky/pre-commit b/signoz/frontend/.husky/pre-commit new file mode 100755 index 0000000..d801fab --- /dev/null +++ b/signoz/frontend/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +cd frontend && yarn lint-staged diff --git a/signoz/frontend/.npmrc b/signoz/frontend/.npmrc new file mode 100644 index 0000000..31dfb02 --- /dev/null +++ b/signoz/frontend/.npmrc @@ -0,0 +1 @@ +registry = 'https://registry.npmjs.org/' \ No newline at end of file diff --git a/signoz/frontend/.nvmrc b/signoz/frontend/.nvmrc new file mode 100644 index 0000000..c818c7b --- /dev/null +++ b/signoz/frontend/.nvmrc @@ -0,0 +1 @@ +16.15.0 \ No newline at end of file diff --git a/signoz/frontend/.prettierignore b/signoz/frontend/.prettierignore new file mode 100644 index 0000000..69797b1 --- /dev/null +++ b/signoz/frontend/.prettierignore @@ -0,0 +1,6 @@ +# Ignore artifacts: +build +coverage + +# Ignore all MD files: +**/*.md \ No newline at end of file diff --git a/signoz/frontend/.prettierrc.json b/signoz/frontend/.prettierrc.json new file mode 100644 index 0000000..a147736 --- /dev/null +++ b/signoz/frontend/.prettierrc.json @@ -0,0 +1,8 @@ +{ + "trailingComma": "all", + "useTabs": true, + "tabWidth": 1, + "singleQuote": true, + "jsxSingleQuote": false, + "semi": true +} diff --git a/signoz/frontend/.yarnrc b/signoz/frontend/.yarnrc new file mode 
100644 index 0000000..843c88f --- /dev/null +++ b/signoz/frontend/.yarnrc @@ -0,0 +1,2 @@ +network-timeout 600000 +save-prefix "" diff --git a/signoz/frontend/CONTRIBUTIONS.md b/signoz/frontend/CONTRIBUTIONS.md new file mode 100644 index 0000000..0b3d762 --- /dev/null +++ b/signoz/frontend/CONTRIBUTIONS.md @@ -0,0 +1,56 @@ +# **Frontend Guidelines** + +Embrace the spirit of collaboration and contribute to the success of our open-source project by adhering to these frontend development guidelines with precision and passion. + +### React and Components + +- Strive to create small and modular components, ensuring they are divided into individual pieces for improved maintainability and reusability. +- Avoid passing inline objects or functions as props to React components, as they are recreated with each render cycle. + Utilize careful memoization of functions and variables, balancing optimization efforts to prevent potential performance issues. [When to useMemo and useCallback](https://kentcdodds.com/blog/usememo-and-usecallback) by Kent C. Dodds is quite helpful for this scenario. +- Minimize the use of inline functions whenever possible to enhance code readability and improve overall comprehension. +- Employ the appropriate usage of useMemo and useCallback hooks for effective memoization of values and functions. +- Determine the appropriate placement of components: + - Pages should contain an aggregation of all components and containers. + - Commonly used components should reside in the 'components' directory. + - Parent components responsible for data manipulation should be placed in the 'container' directory. +- Strategically decide where to store data, either in global state or local components: + - Begin by storing data in local components and gradually transition to global state as necessary. +- Avoid importing default namespace `React` as the project is using `v18` and `import React from 'react'` is not needed anymore. 
+- When a function requires more than three arguments (except when memoized), encapsulate them within an object to enhance readability and reduce potential parameter complexity. + +### API and Services + +- Avoid incorporating business logic within API/Service files to maintain flexibility for consumers to handle it according to their specific needs. +- Employ the use of the useQuery hook for fetching data and the useMutation hook for updating data, ensuring a consistent and efficient approach. +- Utilize the useQueryClient hook when updating the cache, facilitating smooth and effective management of data within the application. + +**Note -** In our project, we are utilizing React Query v3. To gain a comprehensive understanding of its features and implementation, we recommend referring to the [official documentation](https://tanstack.com/query/v3/docs/react/overview) as a valuable resource. + +### Styling + +- Refrain from using inline styling within React components to maintain separation of concerns and promote a more maintainable codebase. +- Opt for using the rem unit instead of px values to ensure better scalability and responsiveness across different devices and screen sizes. + +### Linting and Setup + +- It is crucial to refrain from disabling ESLint and TypeScript errors within the project. If there is a specific rule that needs to be disabled, provide a clear and justified explanation for doing so. Maintaining the integrity of the linting and type-checking processes ensures code quality and consistency throughout the codebase. +- In our project, we rely on several essential ESLint plugins, namely: + - [plugin:@typescript-eslint](https://typescript-eslint.io/rules/) + - [airbnb styleguide](https://github.com/airbnb/javascript) + - [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs) + + To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. 
Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project. + +### Naming Conventions + +- Ensure that component names are written in Capital Case, while the folder names should be in lowercase. +- Keep all other elements, such as variables, functions, and file names, in lowercase. + +### Miscellaneous + +- Ensure that functions are modularized and follow the Single Responsibility Principle (SRP). The function's name should accurately convey its purpose and functionality. +- Semantic division of functions into smaller units should be prioritized for improved readability and maintainability. + Aim to keep functions concise and avoid exceeding a maximum length of 40 lines to enhance code understandability and ease of maintenance. +- Eliminate the use of hard-coded strings or enums, favoring a more flexible and maintainable approach. +- Strive to internationalize all strings within the codebase to support localization and improve accessibility for users across different languages. +- Minimize the usage of multiple if statements or switch cases within a function. Consider creating a mapper and separating logic into multiple functions for better code organization. 
diff --git a/signoz/frontend/Dockerfile b/signoz/frontend/Dockerfile new file mode 100644 index 0000000..1d0449f --- /dev/null +++ b/signoz/frontend/Dockerfile @@ -0,0 +1,18 @@ +FROM nginx:1.26-alpine + +# Add Maintainer Info +LABEL maintainer="signoz" + +# Set working directory +WORKDIR /frontend + +# Remove default nginx index page +RUN rm -rf /usr/share/nginx/html/* + +# Copy custom nginx config and static files +COPY conf/default.conf /etc/nginx/conf.d/default.conf +COPY build /usr/share/nginx/html + +EXPOSE 3301 + +ENTRYPOINT ["nginx", "-g", "daemon off;"] diff --git a/signoz/frontend/README.md b/signoz/frontend/README.md new file mode 100644 index 0000000..99a3671 --- /dev/null +++ b/signoz/frontend/README.md @@ -0,0 +1,105 @@ +# Configuring Over Local +1. Docker +1. Without Docker + +## With Docker + +**Building image** + +``docker-compose up` +/ This will also run + +or +`docker build . -t tagname` + +**Tag to remote url- Introduce versinoing later on** + +``` +docker tag signoz/frontend:latest 7296823551/signoz:latest +``` + +``` +docker-compose up +``` + +## Without Docker +Follow the steps below + +1. ```git clone https://github.com/SigNoz/signoz.git && cd signoz/frontend``` +1. change baseURL to `````` in file ```src/constants/env.ts``` + +1. ```yarn install``` +1. ```yarn dev``` + +```Note: Please ping us in #contributing channel in our slack community and we will DM you with ``` + +# Getting Started with Create React App + +This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). + +## Available Scripts + +In the project directory, you can run: + +### `yarn start` + +Runs the app in the development mode.\ +Open [http://localhost:3301](http://localhost:3301) to view it in the browser. + +The page will reload if you make edits.\ +You will also see any lint errors in the console. 
+ +### `yarn test` + +Launches the test runner in the interactive watch mode.\ +See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. + +### `yarn build` + +Builds the app for production to the `build` folder.\ +It correctly bundles React in production mode and optimizes the build for the best performance. + +The build is minified and the filenames include the hashes.\ +Your app is ready to be deployed! + +See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. + +### `yarn eject` + +**Note: this is a one-way operation. Once you `eject`, you can’t go back!** + +If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. + +Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. + +You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. + +## Learn More + +You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). + +To learn React, check out the [React documentation](https://reactjs.org/). 
+ +### Code Splitting + +This section has moved here: [https://facebook.github.io/create-react-app/docs/code-splitting](https://facebook.github.io/create-react-app/docs/code-splitting) + +### Analyzing the Bundle Size + +This section has moved here: [https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size](https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size) + +### Making a Progressive Web App + +This section has moved here: [https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app](https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app) + +### Advanced Configuration + +This section has moved here: [https://facebook.github.io/create-react-app/docs/advanced-configuration](https://facebook.github.io/create-react-app/docs/advanced-configuration) + +### Deployment + +This section has moved here: [https://facebook.github.io/create-react-app/docs/deployment](https://facebook.github.io/create-react-app/docs/deployment) + +### `yarn build` fails to minify + +This section has moved here: [https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify](https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify) diff --git a/signoz/frontend/__mocks__/cssMock.ts b/signoz/frontend/__mocks__/cssMock.ts new file mode 100644 index 0000000..ff8b4c5 --- /dev/null +++ b/signoz/frontend/__mocks__/cssMock.ts @@ -0,0 +1 @@ +export default {}; diff --git a/signoz/frontend/babel.config.js b/signoz/frontend/babel.config.js new file mode 100644 index 0000000..b3df57d --- /dev/null +++ b/signoz/frontend/babel.config.js @@ -0,0 +1,6 @@ +module.exports = { + presets: [ + ['@babel/preset-env', { targets: { node: 'current' } }], + '@babel/preset-typescript', + ], +}; diff --git a/signoz/frontend/bundlesize.config.json b/signoz/frontend/bundlesize.config.json new file mode 100644 index 0000000..1467ff0 --- /dev/null +++ 
b/signoz/frontend/bundlesize.config.json @@ -0,0 +1,8 @@ +{ + "files": [ + { + "path": "./build/**.js", + "maxSize": "1.2MB" + } + ] +} diff --git a/signoz/frontend/commitlint.config.ts b/signoz/frontend/commitlint.config.ts new file mode 100644 index 0000000..3f5e287 --- /dev/null +++ b/signoz/frontend/commitlint.config.ts @@ -0,0 +1 @@ +export default { extends: ['@commitlint/config-conventional'] }; diff --git a/signoz/frontend/conf/default.conf b/signoz/frontend/conf/default.conf new file mode 100644 index 0000000..8c1eafe --- /dev/null +++ b/signoz/frontend/conf/default.conf @@ -0,0 +1,33 @@ +server { + listen 3301; + server_name _; + + gzip on; + gzip_static on; + gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + gzip_proxied any; + gzip_vary on; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + + # to handle uri issue 414 from nginx + client_max_body_size 24M; + large_client_header_buffers 8 128k; + + location / { + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html; + } + location = /api { + proxy_pass http://signoz-query-service:8080/api; + } + + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +} \ No newline at end of file diff --git a/signoz/frontend/docker-compose.yml b/signoz/frontend/docker-compose.yml new file mode 100644 index 0000000..8bc085d --- /dev/null +++ b/signoz/frontend/docker-compose.yml @@ -0,0 +1,7 @@ +version: "3.9" +services: + web: + build: . 
+ image: signoz/frontend:latest + ports: + - "3301:3301" diff --git a/signoz/frontend/example.env b/signoz/frontend/example.env new file mode 100644 index 0000000..0ddbfb2 --- /dev/null +++ b/signoz/frontend/example.env @@ -0,0 +1,7 @@ +NODE_ENV="development" +BUNDLE_ANALYSER="true" +FRONTEND_API_ENDPOINT="http://localhost:3301/" +INTERCOM_APP_ID="intercom-app-id" + +PLAYWRIGHT_TEST_BASE_URL="http://localhost:3301" +CI="1" \ No newline at end of file diff --git a/signoz/frontend/i18-generate-hash.js b/signoz/frontend/i18-generate-hash.js new file mode 100644 index 0000000..cbc03f9 --- /dev/null +++ b/signoz/frontend/i18-generate-hash.js @@ -0,0 +1,20 @@ +const crypto = require('crypto'); +const fs = require('fs'); +const glob = require('glob'); + +function generateChecksum(str, algorithm, encoding) { + return crypto + .createHash(algorithm || 'md5') + .update(str, 'utf8') + .digest(encoding || 'hex'); +} + +const result = {}; + +glob.sync(`public/locales/**/*.json`).forEach((path) => { + const [_, lang] = path.split('public/locales'); + const content = fs.readFileSync(path, { encoding: 'utf-8' }); + result[lang.replace('.json', '')] = generateChecksum(content); +}); + +fs.writeFileSync('./i18n-translations-hash.json', JSON.stringify(result)); diff --git a/signoz/frontend/jest.config.ts b/signoz/frontend/jest.config.ts new file mode 100644 index 0000000..122b309 --- /dev/null +++ b/signoz/frontend/jest.config.ts @@ -0,0 +1,48 @@ +import type { Config } from '@jest/types'; + +const config: Config.InitialOptions = { + clearMocks: true, + coverageDirectory: 'coverage', + coverageReporters: ['text', 'cobertura', 'html', 'json-summary'], + collectCoverageFrom: ['src/**/*.{ts,tsx}'], + moduleFileExtensions: ['ts', 'tsx', 'js', 'json'], + modulePathIgnorePatterns: ['dist'], + moduleNameMapper: { + '\\.(css|less|scss)$': '/__mocks__/cssMock.ts', + '\\.md$': '/__mocks__/cssMock.ts', + }, + globals: { + extensionsToTreatAsEsm: ['.ts'], + 'ts-jest': { + useESM: true, + }, + }, 
+ testMatch: ['/src/**/*?(*.)(test).(ts|js)?(x)'], + preset: 'ts-jest/presets/js-with-ts-esm', + transform: { + '^.+\\.(ts|tsx)?$': 'ts-jest', + '^.+\\.(js|jsx)$': 'babel-jest', + }, + transformIgnorePatterns: [ + 'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)', + ], + setupFilesAfterEnv: ['jest.setup.ts'], + testPathIgnorePatterns: ['/node_modules/', '/public/'], + moduleDirectories: ['node_modules', 'src'], + testEnvironment: 'jest-environment-jsdom', + testEnvironmentOptions: { + 'jest-playwright': { + browsers: ['chromium', 'firefox', 'webkit'], + }, + }, + coverageThreshold: { + global: { + statements: 80, + branches: 65, + functions: 80, + lines: 80, + }, + }, +}; + +export default config; diff --git a/signoz/frontend/jest.setup.ts b/signoz/frontend/jest.setup.ts new file mode 100644 index 0000000..4c3aad2 --- /dev/null +++ b/signoz/frontend/jest.setup.ts @@ -0,0 +1,30 @@ +/* eslint-disable @typescript-eslint/explicit-function-return-type */ +/* eslint-disable object-shorthand */ +/* eslint-disable func-names */ + +/** + * Adds custom matchers from the react testing library to all tests + */ +import '@testing-library/jest-dom'; +import 'jest-styled-components'; +import './src/styles.scss'; + +import { server } from './src/mocks-server/server'; +// Establish API mocking before all tests. 
+ +// Mock window.matchMedia +window.matchMedia = + window.matchMedia || + function (): any { + return { + matches: false, + addListener: function () {}, + removeListener: function () {}, + }; + }; + +beforeAll(() => server.listen()); + +afterEach(() => server.resetHandlers()); + +afterAll(() => server.close()); diff --git a/signoz/frontend/package.json b/signoz/frontend/package.json new file mode 100644 index 0000000..34e08ea --- /dev/null +++ b/signoz/frontend/package.json @@ -0,0 +1,245 @@ +{ + "name": "frontend", + "version": "1.0.0", + "description": "", + "main": "webpack.config.js", + "scripts": { + "i18n:generate-hash": "node ./i18-generate-hash.js", + "dev": "npm run i18n:generate-hash && cross-env NODE_ENV=development webpack serve --progress", + "build": "npm run i18n:generate-hash && webpack --config=webpack.config.prod.js --progress", + "prettify": "prettier --write .", + "lint": "npm run i18n:generate-hash && eslint ./src", + "lint:fix": "npm run i18n:generate-hash && eslint ./src --fix", + "jest": "jest", + "jest:coverage": "jest --coverage", + "jest:watch": "jest --watch", + "postinstall": "is-ci || yarn husky:configure", + "playwright": "npm run i18n:generate-hash && NODE_ENV=testing playwright test --config=./playwright.config.ts", + "playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium", + "playwright:codegen:local": "playwright codegen http://localhost:3301", + "playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json", + "husky:configure": "cd .. 
&& husky install frontend/.husky && cd frontend && chmod ug+x .husky/*", + "commitlint": "commitlint --edit $1", + "test": "jest --coverage", + "test:changedsince": "jest --changedSince=develop --coverage --silent" + }, + "engines": { + "node": ">=16.15.0" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@ant-design/colors": "6.0.0", + "@ant-design/icons": "4.8.0", + "@dnd-kit/core": "6.1.0", + "@dnd-kit/modifiers": "7.0.0", + "@dnd-kit/sortable": "8.0.0", + "@grafana/data": "^9.5.2", + "@mdx-js/loader": "2.3.0", + "@mdx-js/react": "2.3.0", + "@monaco-editor/react": "^4.3.1", + "@radix-ui/react-tabs": "1.0.4", + "@radix-ui/react-tooltip": "1.0.7", + "@sentry/react": "7.102.1", + "@sentry/webpack-plugin": "2.16.0", + "@signozhq/design-tokens": "0.0.8", + "@uiw/react-md-editor": "3.23.5", + "@visx/group": "3.3.0", + "@visx/shape": "3.5.0", + "@visx/tooltip": "3.3.0", + "@xstate/react": "^3.0.0", + "ansi-to-html": "0.7.2", + "antd": "5.11.0", + "antd-table-saveas-excel": "2.2.1", + "axios": "1.6.4", + "babel-eslint": "^10.1.0", + "babel-jest": "^29.6.4", + "babel-loader": "9.1.3", + "babel-plugin-named-asset-import": "^0.3.7", + "babel-preset-minify": "^0.5.1", + "babel-preset-react-app": "^10.0.1", + "chart.js": "3.9.1", + "chartjs-adapter-date-fns": "^2.0.0", + "chartjs-plugin-annotation": "^1.4.0", + "classnames": "2.3.2", + "color": "^4.2.1", + "color-alpha": "1.1.3", + "cross-env": "^7.0.3", + "css-loader": "5.0.0", + "css-minimizer-webpack-plugin": "5.0.1", + "dayjs": "^1.10.7", + "dompurify": "3.0.0", + "dotenv": "8.2.0", + "event-source-polyfill": "1.0.31", + "eventemitter3": "5.0.1", + "file-loader": "6.1.1", + "fontfaceobserver": "2.3.0", + "history": "4.10.1", + "html-webpack-plugin": "5.5.0", + "http-proxy-middleware": "2.0.6", + "i18next": "^21.6.12", + "i18next-browser-languagedetector": "^6.1.3", + "i18next-http-backend": "^1.3.2", + "jest": "^27.5.1", + "js-base64": "^3.7.2", + "less": "^4.1.2", + "less-loader": "^10.2.0", + "lodash-es": 
"^4.17.21", + "lucide-react": "0.379.0", + "mini-css-extract-plugin": "2.4.5", + "papaparse": "5.4.1", + "posthog-js": "1.142.1", + "rc-tween-one": "3.0.6", + "react": "18.2.0", + "react-addons-update": "15.6.3", + "react-beautiful-dnd": "13.1.1", + "react-dnd": "16.0.1", + "react-dnd-html5-backend": "16.0.1", + "react-dom": "18.2.0", + "react-drag-listview": "2.0.0", + "react-error-boundary": "4.0.11", + "react-force-graph": "^1.43.0", + "react-full-screen": "1.1.1", + "react-grid-layout": "^1.3.4", + "react-helmet-async": "1.3.0", + "react-i18next": "^11.16.1", + "react-markdown": "8.0.7", + "react-query": "3.39.3", + "react-redux": "^7.2.2", + "react-router-dom": "^5.2.0", + "react-syntax-highlighter": "15.5.0", + "react-use": "^17.3.2", + "react-virtuoso": "4.0.3", + "overlayscrollbars-react": "^0.5.6", + "overlayscrollbars": "^2.8.1", + "redux": "^4.0.5", + "redux-thunk": "^2.3.0", + "rehype-raw": "7.0.0", + "stream": "^0.0.2", + "style-loader": "1.3.0", + "styled-components": "^5.3.11", + "terser-webpack-plugin": "^5.2.5", + "timestamp-nano": "^1.0.0", + "ts-node": "^10.2.1", + "tsconfig-paths-webpack-plugin": "^3.5.1", + "typescript": "^4.0.5", + "uplot": "1.6.26", + "uuid": "^8.3.2", + "web-vitals": "^0.2.4", + "webpack": "5.88.2", + "webpack-dev-server": "^4.15.1", + "webpack-retry-chunk-load-plugin": "3.1.1", + "xstate": "^4.31.0" + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + }, + "devDependencies": { + "@babel/core": "^7.22.11", + "@babel/plugin-proposal-class-properties": "^7.18.6", + "@babel/plugin-syntax-jsx": "^7.12.13", + "@babel/preset-env": "^7.22.14", + "@babel/preset-react": "^7.12.13", + "@babel/preset-typescript": "^7.21.4", + "@commitlint/cli": "^16.3.0", + "@commitlint/config-conventional": "^16.2.4", + "@jest/globals": "^27.5.1", + "@playwright/test": "^1.22.0", + 
"@testing-library/jest-dom": "5.16.5", + "@testing-library/react": "13.4.0", + "@testing-library/user-event": "14.4.3", + "@types/color": "^3.0.3", + "@types/compression-webpack-plugin": "^9.0.0", + "@types/copy-webpack-plugin": "^8.0.1", + "@types/dompurify": "^2.4.0", + "@types/event-source-polyfill": "^1.0.0", + "@types/fontfaceobserver": "2.1.0", + "@types/jest": "^27.5.1", + "@types/lodash-es": "^4.17.4", + "@types/mini-css-extract-plugin": "^2.5.1", + "@types/node": "^16.10.3", + "@types/papaparse": "5.3.7", + "@types/react": "18.0.26", + "@types/react-addons-update": "0.14.21", + "@types/react-beautiful-dnd": "13.1.8", + "@types/react-dom": "18.0.10", + "@types/react-grid-layout": "^1.1.2", + "@types/react-helmet-async": "1.0.3", + "@types/react-redux": "^7.1.11", + "@types/react-resizable": "3.0.3", + "@types/react-router-dom": "^5.1.6", + "@types/react-syntax-highlighter": "15.5.7", + "@types/redux-mock-store": "1.0.4", + "@types/styled-components": "^5.1.4", + "@types/uuid": "^8.3.1", + "@types/webpack": "^5.28.0", + "@types/webpack-dev-server": "^4.7.2", + "@typescript-eslint/eslint-plugin": "^4.33.0", + "@typescript-eslint/parser": "^4.33.0", + "autoprefixer": "10.4.19", + "babel-plugin-styled-components": "^1.12.0", + "compression-webpack-plugin": "9.0.0", + "copy-webpack-plugin": "^8.1.0", + "critters-webpack-plugin": "^3.0.1", + "eslint": "^7.32.0", + "eslint-config-airbnb": "^19.0.4", + "eslint-config-airbnb-typescript": "^16.1.4", + "eslint-config-prettier": "^8.3.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.28.1", + "eslint-plugin-jest": "^26.9.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-react": "^7.24.0", + "eslint-plugin-react-hooks": "^4.3.0", + "eslint-plugin-simple-import-sort": "^7.0.0", + "eslint-plugin-sonarjs": "^0.12.0", + "husky": "^7.0.4", + "is-ci": "^3.0.1", + 
"jest-playwright-preset": "^1.7.2", + "jest-styled-components": "^7.0.8", + "lint-staged": "^12.5.0", + "msw": "1.3.2", + "npm-run-all": "latest", + "portfinder-sync": "^0.0.2", + "postcss": "8.4.38", + "prettier": "2.2.1", + "raw-loader": "4.0.2", + "react-hooks-testing-library": "0.6.0", + "react-hot-loader": "^4.13.0", + "react-resizable": "3.0.4", + "redux-mock-store": "1.5.4", + "sass": "1.66.1", + "sass-loader": "13.3.2", + "ts-jest": "^27.1.5", + "ts-node": "^10.2.1", + "typescript-plugin-css-modules": "5.0.1", + "webpack-bundle-analyzer": "^4.5.0", + "webpack-cli": "^4.9.2" + }, + "lint-staged": { + "*.(js|jsx|ts|tsx)": [ + "eslint --fix", + "sh scripts/typecheck-staged.sh" + ] + }, + "resolutions": { + "@types/react": "18.0.26", + "@types/react-dom": "18.0.10", + "debug": "4.3.4", + "semver": "7.5.4", + "xml2js": "0.5.0", + "phin": "^3.7.1" + } +} diff --git a/signoz/frontend/playwright.config.ts b/signoz/frontend/playwright.config.ts new file mode 100644 index 0000000..0b24052 --- /dev/null +++ b/signoz/frontend/playwright.config.ts @@ -0,0 +1,23 @@ +import { PlaywrightTestConfig } from '@playwright/test'; +import dotenv from 'dotenv'; + +dotenv.config(); + +const config: PlaywrightTestConfig = { + forbidOnly: !!process.env.CI, + retries: process.env.CI ? 2 : 0, + preserveOutput: 'always', + name: 'Signoz', + testDir: './tests', + use: { + trace: 'retain-on-failure', + baseURL: process.env.PLAYWRIGHT_TEST_BASE_URL || 'http://localhost:3301', + }, + updateSnapshots: 'all', + fullyParallel: !!process.env.CI, + quiet: false, + testMatch: ['**/*.spec.ts'], + reporter: process.env.CI ? 
'github' : 'list', +}; + +export default config; diff --git a/signoz/frontend/public/Icons/alert_emoji.svg b/signoz/frontend/public/Icons/alert_emoji.svg new file mode 100644 index 0000000..70f9091 --- /dev/null +++ b/signoz/frontend/public/Icons/alert_emoji.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/signoz/frontend/public/Icons/awwSnap.svg b/signoz/frontend/public/Icons/awwSnap.svg new file mode 100644 index 0000000..19088a1 --- /dev/null +++ b/signoz/frontend/public/Icons/awwSnap.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/cable-car.svg b/signoz/frontend/public/Icons/cable-car.svg new file mode 100644 index 0000000..0c7318d --- /dev/null +++ b/signoz/frontend/public/Icons/cable-car.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/configure.svg b/signoz/frontend/public/Icons/configure.svg new file mode 100644 index 0000000..088dfa9 --- /dev/null +++ b/signoz/frontend/public/Icons/configure.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/dashboard_emoji.svg b/signoz/frontend/public/Icons/dashboard_emoji.svg new file mode 100644 index 0000000..67d99d6 --- /dev/null +++ b/signoz/frontend/public/Icons/dashboard_emoji.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/dashboards.svg b/signoz/frontend/public/Icons/dashboards.svg new file mode 100644 index 0000000..88386b1 --- /dev/null +++ b/signoz/frontend/public/Icons/dashboards.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/emptyState.svg b/signoz/frontend/public/Icons/emptyState.svg new file mode 100644 index 0000000..b00fbb6 --- /dev/null +++ b/signoz/frontend/public/Icons/emptyState.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/group.svg b/signoz/frontend/public/Icons/group.svg new file mode 100644 index 0000000..e293ceb --- /dev/null +++ 
b/signoz/frontend/public/Icons/group.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/landscape.svg b/signoz/frontend/public/Icons/landscape.svg new file mode 100644 index 0000000..762d345 --- /dev/null +++ b/signoz/frontend/public/Icons/landscape.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/loading-plane.gif b/signoz/frontend/public/Icons/loading-plane.gif new file mode 100644 index 0000000..9d58177 Binary files /dev/null and b/signoz/frontend/public/Icons/loading-plane.gif differ diff --git a/signoz/frontend/public/Icons/redis-logo.svg b/signoz/frontend/public/Icons/redis-logo.svg new file mode 100644 index 0000000..424f1e5 --- /dev/null +++ b/signoz/frontend/public/Icons/redis-logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/tetra-pack.svg b/signoz/frontend/public/Icons/tetra-pack.svg new file mode 100644 index 0000000..13fd4f2 --- /dev/null +++ b/signoz/frontend/public/Icons/tetra-pack.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Icons/tools.svg b/signoz/frontend/public/Icons/tools.svg new file mode 100644 index 0000000..f4d33bc --- /dev/null +++ b/signoz/frontend/public/Icons/tools.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Images/blankDashboardTemplatePreview.svg b/signoz/frontend/public/Images/blankDashboardTemplatePreview.svg new file mode 100644 index 0000000..5c93cf3 --- /dev/null +++ b/signoz/frontend/public/Images/blankDashboardTemplatePreview.svg @@ -0,0 +1,234 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/signoz/frontend/public/Images/eyesEmoji.svg b/signoz/frontend/public/Images/eyesEmoji.svg new file mode 100644 index 0000000..5d7e56a --- /dev/null +++ b/signoz/frontend/public/Images/eyesEmoji.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/signoz/frontend/public/Images/notFound404.png b/signoz/frontend/public/Images/notFound404.png new file mode 100644 index 0000000..f803724 Binary files /dev/null and b/signoz/frontend/public/Images/notFound404.png differ diff --git a/signoz/frontend/public/Images/redisTemplatePreview.svg b/signoz/frontend/public/Images/redisTemplatePreview.svg new file mode 100644 index 0000000..aed4e97 --- /dev/null +++ b/signoz/frontend/public/Images/redisTemplatePreview.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/signoz/frontend/public/Logos/azure-aks.svg b/signoz/frontend/public/Logos/azure-aks.svg new file mode 100644 index 0000000..d456727 --- /dev/null +++ b/signoz/frontend/public/Logos/azure-aks.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/azure-app-service.svg b/signoz/frontend/public/Logos/azure-app-service.svg new file mode 100644 index 0000000..54051fc --- /dev/null +++ b/signoz/frontend/public/Logos/azure-app-service.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/azure-blob-storage.svg b/signoz/frontend/public/Logos/azure-blob-storage.svg new file mode 100644 index 0000000..1650133 --- /dev/null +++ b/signoz/frontend/public/Logos/azure-blob-storage.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/azure-container-apps.svg b/signoz/frontend/public/Logos/azure-container-apps.svg new file mode 100644 index 0000000..3dd3d4d --- /dev/null +++ b/signoz/frontend/public/Logos/azure-container-apps.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/signoz/frontend/public/Logos/azure-functions.svg b/signoz/frontend/public/Logos/azure-functions.svg new file mode 100644 index 0000000..9face30 --- /dev/null +++ b/signoz/frontend/public/Logos/azure-functions.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/azure-sql-database-metrics.svg b/signoz/frontend/public/Logos/azure-sql-database-metrics.svg new file mode 100644 index 0000000..fed6997 --- /dev/null +++ b/signoz/frontend/public/Logos/azure-sql-database-metrics.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/azure-vm.svg b/signoz/frontend/public/Logos/azure-vm.svg new file mode 100644 index 0000000..bde2b81 --- /dev/null +++ b/signoz/frontend/public/Logos/azure-vm.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/cloudwatch.png b/signoz/frontend/public/Logos/cloudwatch.png new file mode 100644 index 0000000..57cd671 Binary files /dev/null and b/signoz/frontend/public/Logos/cloudwatch.png differ diff --git a/signoz/frontend/public/Logos/cmd-terminal.svg b/signoz/frontend/public/Logos/cmd-terminal.svg new file mode 100644 index 0000000..9eb82fb --- /dev/null +++ b/signoz/frontend/public/Logos/cmd-terminal.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/docker.svg b/signoz/frontend/public/Logos/docker.svg new file mode 100644 index 0000000..ff2b2b4 --- /dev/null +++ b/signoz/frontend/public/Logos/docker.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/dotnet.png b/signoz/frontend/public/Logos/dotnet.png new file mode 100644 index 0000000..53c3da3 Binary files /dev/null and b/signoz/frontend/public/Logos/dotnet.png differ diff --git a/signoz/frontend/public/Logos/ec2.svg b/signoz/frontend/public/Logos/ec2.svg new file mode 100644 index 0000000..14f083f --- /dev/null +++ b/signoz/frontend/public/Logos/ec2.svg @@ -0,0 +1,18 @@ + + + + 
Icon-Architecture/64/Arch_Amazon-EC2_64 + Created with Sketch. + + + + + + + + + + + + + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/ecs.svg b/signoz/frontend/public/Logos/ecs.svg new file mode 100644 index 0000000..c2ef4c2 --- /dev/null +++ b/signoz/frontend/public/Logos/ecs.svg @@ -0,0 +1,18 @@ + + + + Icon-Architecture/64/Arch_Amazon-Elastic-Container-Service_64 + Created with Sketch. + + + + + + + + + + + + + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/eks.svg b/signoz/frontend/public/Logos/eks.svg new file mode 100644 index 0000000..b4a9336 --- /dev/null +++ b/signoz/frontend/public/Logos/eks.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/elixir.png b/signoz/frontend/public/Logos/elixir.png new file mode 100644 index 0000000..909a736 Binary files /dev/null and b/signoz/frontend/public/Logos/elixir.png differ diff --git a/signoz/frontend/public/Logos/fluent-bit.png b/signoz/frontend/public/Logos/fluent-bit.png new file mode 100644 index 0000000..55c4e4a Binary files /dev/null and b/signoz/frontend/public/Logos/fluent-bit.png differ diff --git a/signoz/frontend/public/Logos/fluentd.png b/signoz/frontend/public/Logos/fluentd.png new file mode 100644 index 0000000..e87fc62 Binary files /dev/null and b/signoz/frontend/public/Logos/fluentd.png differ diff --git a/signoz/frontend/public/Logos/go.png b/signoz/frontend/public/Logos/go.png new file mode 100644 index 0000000..70f7bd9 Binary files /dev/null and b/signoz/frontend/public/Logos/go.png differ diff --git a/signoz/frontend/public/Logos/heroku.png b/signoz/frontend/public/Logos/heroku.png new file mode 100644 index 0000000..0328d37 Binary files /dev/null and b/signoz/frontend/public/Logos/heroku.png differ diff --git a/signoz/frontend/public/Logos/http.png b/signoz/frontend/public/Logos/http.png new file mode 100644 index 0000000..83ccf10 Binary files /dev/null and b/signoz/frontend/public/Logos/http.png differ 
diff --git a/signoz/frontend/public/Logos/java.png b/signoz/frontend/public/Logos/java.png new file mode 100644 index 0000000..9e48da3 Binary files /dev/null and b/signoz/frontend/public/Logos/java.png differ diff --git a/signoz/frontend/public/Logos/javascript.png b/signoz/frontend/public/Logos/javascript.png new file mode 100644 index 0000000..f58be12 Binary files /dev/null and b/signoz/frontend/public/Logos/javascript.png differ diff --git a/signoz/frontend/public/Logos/kubernetes.svg b/signoz/frontend/public/Logos/kubernetes.svg new file mode 100644 index 0000000..86e288b --- /dev/null +++ b/signoz/frontend/public/Logos/kubernetes.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/logstash.svg b/signoz/frontend/public/Logos/logstash.svg new file mode 100644 index 0000000..cf718cd --- /dev/null +++ b/signoz/frontend/public/Logos/logstash.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/ms-net-framework.png b/signoz/frontend/public/Logos/ms-net-framework.png new file mode 100644 index 0000000..5b0baac Binary files /dev/null and b/signoz/frontend/public/Logos/ms-net-framework.png differ diff --git a/signoz/frontend/public/Logos/node-js.svg b/signoz/frontend/public/Logos/node-js.svg new file mode 100644 index 0000000..9c2d5c6 --- /dev/null +++ b/signoz/frontend/public/Logos/node-js.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/php.png b/signoz/frontend/public/Logos/php.png new file mode 100644 index 0000000..9abe0a9 Binary files /dev/null and b/signoz/frontend/public/Logos/php.png differ diff --git a/signoz/frontend/public/Logos/python.png b/signoz/frontend/public/Logos/python.png new file mode 100644 index 0000000..664f75d Binary files /dev/null and b/signoz/frontend/public/Logos/python.png differ diff --git a/signoz/frontend/public/Logos/rails.png b/signoz/frontend/public/Logos/rails.png new file mode 100644 index 0000000..0a44785 Binary 
files /dev/null and b/signoz/frontend/public/Logos/rails.png differ diff --git a/signoz/frontend/public/Logos/rust.png b/signoz/frontend/public/Logos/rust.png new file mode 100644 index 0000000..08acc5d Binary files /dev/null and b/signoz/frontend/public/Logos/rust.png differ diff --git a/signoz/frontend/public/Logos/signoz-brand-logo.svg b/signoz/frontend/public/Logos/signoz-brand-logo.svg new file mode 100644 index 0000000..aaa8a77 --- /dev/null +++ b/signoz/frontend/public/Logos/signoz-brand-logo.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/signoz/frontend/public/Logos/software-window.svg b/signoz/frontend/public/Logos/software-window.svg new file mode 100644 index 0000000..60bf068 --- /dev/null +++ b/signoz/frontend/public/Logos/software-window.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/Logos/swift.png b/signoz/frontend/public/Logos/swift.png new file mode 100644 index 0000000..79429f1 Binary files /dev/null and b/signoz/frontend/public/Logos/swift.png differ diff --git a/signoz/frontend/public/Logos/syslogs.svg b/signoz/frontend/public/Logos/syslogs.svg new file mode 100644 index 0000000..40f9055 --- /dev/null +++ b/signoz/frontend/public/Logos/syslogs.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/signoz/frontend/public/Logos/vercel.png b/signoz/frontend/public/Logos/vercel.png new file mode 100644 index 0000000..0ac66ce Binary files /dev/null and b/signoz/frontend/public/Logos/vercel.png differ diff --git a/signoz/frontend/public/SigNoz-dark.svg b/signoz/frontend/public/SigNoz-dark.svg new file mode 100644 index 0000000..41a6fb8 --- /dev/null +++ b/signoz/frontend/public/SigNoz-dark.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/signoz/frontend/public/SigNoz-white.svg b/signoz/frontend/public/SigNoz-white.svg new file mode 100644 index 0000000..17cc833 --- /dev/null +++ b/signoz/frontend/public/SigNoz-white.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git 
a/signoz/frontend/public/favicon.ico b/signoz/frontend/public/favicon.ico new file mode 100644 index 0000000..98a6d5f Binary files /dev/null and b/signoz/frontend/public/favicon.ico differ diff --git a/signoz/frontend/public/fonts/GeistMonoVF.woff2 b/signoz/frontend/public/fonts/GeistMonoVF.woff2 new file mode 100644 index 0000000..fb2f024 Binary files /dev/null and b/signoz/frontend/public/fonts/GeistMonoVF.woff2 differ diff --git a/signoz/frontend/public/locales/en-GB/alerts.json b/signoz/frontend/public/locales/en-GB/alerts.json new file mode 100644 index 0000000..a43d04a --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/alerts.json @@ -0,0 +1,123 @@ +{ + "target_missing": "Please enter a threshold to proceed", + "rule_test_fired": "Test notification sent successfully", + "no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.", + "button_testrule": "Test Notification", + "label_channel_select": "Notification Channels", + "placeholder_channel_select": "select one or more channels", + "channel_select_tooltip": "Leave empty to send this alert on all the configured channels", + "preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.", + "preview_chart_threshold_label": "Threshold", + "placeholder_label_key_pair": "Click here to enter a label (key value pairs)", + "button_yes": "Yes", + "button_no": "No", + "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?", + "remove_label_success": "Labels cleared", + "alert_form_step1": "Step 1 - Define the metric", + "alert_form_step2": "Step 2 - Define Alert Conditions", + "alert_form_step3": "Step 3 - Alert Configuration", + "metric_query_max_limit": "Can not create query. 
You can create maximum of 5 queries", + "confirm_save_title": "Save Changes", + "confirm_save_content_part1": "Your alert built with", + "confirm_save_content_part2": "query will be saved. Press OK to confirm.", + "unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin", + "rule_created": "Rule created successfully", + "rule_edited": "Rule edited successfully", + "expression_missing": "expression is missing in {{where}}", + "metricname_missing": "metric name is missing in {{where}}", + "condition_required": "at least one metric condition is required", + "alertname_required": "alert name is required", + "promql_required": "promql expression is required when query format is set to PromQL", + "chquery_required": "query is required when query format is set to ClickHouse", + "button_savechanges": "Save Rule", + "button_createrule": "Create Rule", + "button_returntorules": "Return to rules", + "button_cancelchanges": "Cancel", + "button_discard": "Discard", + "text_condition1": "Send a notification when", + "text_condition2": "the threshold", + "text_condition3": "during the last", + "option_1min": "1 min", + "option_5min": "5 mins", + "option_10min": "10 mins", + "option_15min": "15 mins", + "option_30min": "30 mins", + "option_60min": "60 mins", + "option_4hours": "4 hours", + "option_3hours": "3 hours", + "option_6hours": "6 hours", + "option_12hours": "12 hours", + "option_24hours": "24 hours", + "field_threshold": "Alert Threshold", + "option_allthetimes": "all the times", + "option_atleastonce": "at least once", + "option_onaverage": "on average", + "option_intotal": "in total", + "option_above": "above", + "option_below": "below", + "option_equal": "is equal to", + "option_notequal": "not equal to", + "button_query": "Query", + "button_formula": "Formula", + "tab_qb": "Query Builder", + "tab_promql": "PromQL", + "tab_chquery": "ClickHouse Query", + "title_confirm": "Confirm", + "button_ok": "Yes", + "button_cancel": "No", + 
"field_promql_expr": "PromQL Expression", + "field_alert_name": "Alert Name", + "field_notification_channel": "Notification Channel", + "field_alert_desc": "Alert Description", + "field_labels": "Labels", + "field_severity": "Severity", + "option_critical": "Critical", + "option_error": "Error", + "option_warning": "Warning", + "option_info": "Info", + "user_guide_headline": "Steps to create an Alert", + "user_guide_qb_step1": "Step 1 - Define the metric", + "user_guide_qb_step1a": "Choose a metric which you want to create an alert on", + "user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed", + "user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric", + "user_guide_qb_step1d": "Create a formula based on Queries if needed", + "user_guide_qb_step2": "Step 2 - Define Alert Conditions", + "user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value", + "user_guide_qb_step2b": "Enter the Alert threshold", + "user_guide_qb_step3": "Step 3 -Alert Configuration", + "user_guide_qb_step3a": "Set alert severity, name and descriptions", + "user_guide_qb_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_pql_step1": "Step 1 - Define the metric", + "user_guide_pql_step1a": "Write a PromQL query for the metric", + "user_guide_pql_step1b": "Format the legends based on labels you want to highlight", + "user_guide_pql_step2": "Step 2 - Define Alert Conditions", + "user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_pql_step2b": "Enter the Alert threshold", + "user_guide_pql_step3": "Step 3 -Alert Configuration", + "user_guide_pql_step3a": "Set alert severity, name and descriptions", + "user_guide_pql_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_ch_step1": "Step 1 - Define the metric", + "user_guide_ch_step1a": "Write a 
Clickhouse query for alert evaluation. Follow <0>this tutorial to learn about query format and supported vars.", + "user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart", + "user_guide_ch_step2": "Step 2 - Define Alert Conditions", + "user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_ch_step2b": "Enter the Alert threshold", + "user_guide_ch_step3": "Step 3 -Alert Configuration", + "user_guide_ch_step3a": "Set alert severity, name and descriptions", + "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts", + "choose_alert_type": "Choose a type for the alert", + "metric_based_alert": "Metric based Alert", + "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.", + "log_based_alert": "Log-based Alert", + "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.", + "traces_based_alert": "Trace-based Alert", + "traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.", + "exceptions_based_alert": "Exceptions-based Alert", + "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", + "field_unit": "Threshold unit", + "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", + "text_for": "minutes", + "selected_query_placeholder": "Select query" +} diff --git a/signoz/frontend/public/locales/en-GB/channels.json b/signoz/frontend/public/locales/en-GB/channels.json new file mode 100644 index 0000000..807b7a6 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/channels.json @@ -0,0 +1,59 @@ +{ + "channel_delete_unexp_error": "Something went wrong", + "channel_delete_success": "Channel Deleted Successfully", + "column_channel_name": "Name", + "column_channel_type": 
"Type", + "column_channel_action": "Action", + "column_channel_edit": "Edit", + "button_new_channel": "New Alert Channel", + "tooltip_notification_channels": "More details on how to setting notification channels", + "sending_channels_note": "The alerts will be sent to all the configured channels.", + "loading_channels_message": "Loading Channels..", + "page_title_create": "New Notification Channels", + "page_title_edit": "Edit Notification Channels", + "button_save_channel": "Save", + "button_test_channel": "Test", + "button_return": "Back", + "field_channel_name": "Name", + "field_send_resolved": "Send resolved alerts", + "field_channel_type": "Type", + "field_webhook_url": "Webhook URL", + "field_slack_recipient": "Recipient", + "field_slack_title": "Title", + "field_slack_description": "Description", + "field_webhook_username": "User Name (optional)", + "field_webhook_password": "Password (optional)", + "field_pager_routing_key": "Routing Key", + "field_pager_description": "Description", + "field_pager_severity": "Severity", + "field_pager_details": "Additional Information", + "field_pager_component": "Component", + "field_pager_group": "Group", + "field_pager_class": "Class", + "field_pager_client": "Client", + "field_pager_client_url": "Client URL", + "placeholder_slack_description": "Description", + "placeholder_pager_description": "Description", + "help_pager_client": "Shows up as event source in Pagerduty", + "help_pager_client_url": "Shows up as event source link in Pagerduty", + "help_pager_class": "The class/type of the event", + "help_pager_details": "Specify a key-value format (must be a valid json)", + "help_pager_group": "A cluster or grouping of sources", + "help_pager_component": "The part or component of the affected system that is broke", + "help_pager_severity": "Severity of the incident, must be one of: must be one of the following: 'critical', 'warning', 'error' or 'info'", + "help_webhook_username": "Leave empty for bearer auth or when 
authentication is not necessary.", + "help_webhook_password": "Specify a password or bearer token", + "help_pager_description": "Shows up as description in pagerduty", + "channel_creation_done": "Successfully created the channel", + "channel_creation_failed": "An unexpected error occurred while creating this channel", + "channel_edit_done": "Channels Edited Successfully", + "channel_edit_failed": "An unexpected error occurred while updating this channel", + "selected_channel_invalid": "Channel type selected is invalid", + "username_no_password": "A Password must be provided with user name", + "test_unsupported": "Sorry, this channel type does not support test yet", + "channel_test_done": "An alert has been sent to this channel", + "channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly", + "channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again", + "webhook_url_required": "Webhook URL is mandatory", + "slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en-GB/common.json b/signoz/frontend/public/locales/en-GB/common.json new file mode 100644 index 0000000..f167aec --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/common.json @@ -0,0 +1,10 @@ +{ + "something_went_wrong": "Something went wrong", + "already_logged_in": "Already Logged In", + "success": "Success", + "cancel": "Cancel", + "share": "Share", + "save": "Save", + "edit": "Edit", + "logged_in": "Logged In" +} diff --git a/signoz/frontend/public/locales/en-GB/dashboard.json b/signoz/frontend/public/locales/en-GB/dashboard.json new file mode 100644 index 0000000..ec804cc --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/dashboard.json @@ -0,0 +1,30 @@ +{ + "create_dashboard": "Create Dashboard", + "import_json": "Import Dashboard 
JSON", + "import_grafana_json": "Import Grafana JSON", + "copy_to_clipboard": "Copy To ClipBoard", + "download_json": "Download JSON", + "view_json": "View JSON", + "export_dashboard": "Export this dashboard.", + "upload_json_file": "Upload JSON file", + "paste_json_below": "Paste JSON below", + "error_upload_json": "Invalid JSON", + "import_and_next": "Import and Next", + "import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file", + "error_loading_json": "Error loading JSON file", + "empty_json_not_allowed": "Empty JSON is not allowed", + "new_dashboard_title": "Sample Title", + "layout_saved_successfully": "Layout saved successfully", + "add_panel": "Add Panel", + "add_row": "Add Row", + "save_layout": "Save Layout", + "variable_updated_successfully": "Variable updated successfully", + "error_while_updating_variable": "Error while updating variable", + "dashboard_has_been_updated": "Dashboard has been updated", + "do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?", + "delete_dashboard_success": "{{name}} dashboard deleted successfully", + "dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.", + "dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. Press OK to confirm.", + "your_graph_build_with": "Your graph built with", + "dashboard_ok_confirm": "query will be saved. Press OK to confirm." 
+} diff --git a/signoz/frontend/public/locales/en-GB/errorDetails.json b/signoz/frontend/public/locales/en-GB/errorDetails.json new file mode 100644 index 0000000..f29f499 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/errorDetails.json @@ -0,0 +1,7 @@ +{ + "see_trace_graph": "See what happened before and after this error in a trace graph", + "see_error_in_trace_graph": "See the error in trace graph", + "stack_trace": "Stacktrace", + "older": "Older", + "newer": "Newer" +} diff --git a/signoz/frontend/public/locales/en-GB/explorer.json b/signoz/frontend/public/locales/en-GB/explorer.json new file mode 100644 index 0000000..b4ffa61 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/explorer.json @@ -0,0 +1,3 @@ +{ + "name_of_the_view": "Name of the view" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en-GB/generalSettings.json b/signoz/frontend/public/locales/en-GB/generalSettings.json new file mode 100644 index 0000000..1519873 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/generalSettings.json @@ -0,0 +1,21 @@ +{ + "total_retention_period": "Total Retention Period", + "move_to_s3": "Move to S3\n(should be lower than total retention period)", + "status_message": { + "success": "Your last call to change retention period to {{total_retention}} {{s3_part}} was successful.", + "failed": "Your last call to change retention period to {{total_retention}} {{s3_part}} failed. Please try again.", + "pending": "Your last call to change retention period to {{total_retention}} {{s3_part}} is pending. This may take some time.", + "s3_part": "and S3 to {{s3_retention}}" + }, + "retention_save_button": { + "pending": "Updating {{name}} retention period", + "success": "Save" + }, + "retention_request_race_condition": "Your request to change retention period has failed, as another request is still in process.", + "retention_error_message": "There was an issue in changing the retention period for {{name}}. 
Please try again or reach out to support@signoz.io", + "retention_failed_message": "There was an issue in changing the retention period. Please try again or reach out to support@signoz.io", + "retention_comparison_error": "Total retention period for {{name}} can’t be lower or equal to the period after which data is moved to s3.", + "retention_null_value_error": "Retention Period for {{name}} is not set yet. Please set by choosing below", + "retention_confirmation": "Are you sure you want to change the retention period?", + "retention_confirmation_description": "This will change the amount of storage needed for saving {{name}}." +} diff --git a/signoz/frontend/public/locales/en-GB/ingestionKeys.json b/signoz/frontend/public/locales/en-GB/ingestionKeys.json new file mode 100644 index 0000000..256e883 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/ingestionKeys.json @@ -0,0 +1,3 @@ +{ + "delete_confirm_message": "Are you sure you want to delete {{keyName}}? Deleting an ingestion key is irreversible and cannot be undone." 
+} diff --git a/signoz/frontend/public/locales/en-GB/licenses.json b/signoz/frontend/public/locales/en-GB/licenses.json new file mode 100644 index 0000000..ed7c3ee --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/licenses.json @@ -0,0 +1,13 @@ +{ + "column_license_key": "License Key", + "column_valid_from": "Valid From", + "column_valid_until": "Valid Until", + "column_license_status": "Status", + "button_apply": "Apply", + "placeholder_license_key": "Enter a License Key", + "tab_current_license": "Current License", + "tab_license_history": "History", + "loading_licenses": "Loading licenses...", + "enter_license_key": "Please enter a license key", + "license_applied": "License applied successfully" +} diff --git a/signoz/frontend/public/locales/en-GB/login.json b/signoz/frontend/public/locales/en-GB/login.json new file mode 100644 index 0000000..84c9e1d --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/login.json @@ -0,0 +1,22 @@ +{ + "label_email": "Email", + "placeholder_email": "name@yourcompany.com", + "label_password": "Password", + "button_initiate_login": "Next", + "button_login": "Login", + "login_page_title": "Login with SigNoz", + "login_with_sso": "Login with SSO", + "login_with_pwd": "Login with password", + "forgot_password": "Forgot password?", + "create_an_account": "Create an account", + "prompt_if_admin": "If you are admin,", + "prompt_create_account": "If you are setting up SigNoz for the first time,", + "prompt_no_account": "Don't have an account? Contact your admin to send you an invite link.", + "prompt_forgot_password": "Ask your admin to reset your password and send you a new invite link", + "prompt_on_sso_error": "Are you trying to resolve SSO configuration issue?", + "unexpected_error": "Sorry, something went wrong", + "failed_to_login": "sorry, failed to login", + "invalid_email": "Please enter a valid email address", + "invalid_account": "This account does not exist. 
To create a new account, contact your admin to get an invite link", + "invalid_config": "Invalid configuration detected, please contact your administrator" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en-GB/logs.json b/signoz/frontend/public/locales/en-GB/logs.json new file mode 100644 index 0000000..804f66f --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/logs.json @@ -0,0 +1 @@ +{ "fetching_log_lines": "Fetching log lines" } diff --git a/signoz/frontend/public/locales/en-GB/onboarding.json b/signoz/frontend/public/locales/en-GB/onboarding.json new file mode 100644 index 0000000..5732826 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/onboarding.json @@ -0,0 +1,8 @@ +{ + "invite_user": "Invite your teammates", + "invite": "Invite", + "skip": "Skip", + "invite_user_helper_text": "Not the right person to get started? No worries! Invite someone who can.", + "select_use_case": "Select a use-case to get started", + "get_started": "Get Started" +} diff --git a/signoz/frontend/public/locales/en-GB/organizationsettings.json b/signoz/frontend/public/locales/en-GB/organizationsettings.json new file mode 100644 index 0000000..74654d9 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/organizationsettings.json @@ -0,0 +1,18 @@ +{ + "display_name": "Display Name", + "signoz": "SigNoz", + "email_address": "Email address", + "name_optional": "Name (optional)", + "role": "Role", + "email_placeholder": "john@signoz.io", + "name_placeholder": "John", + "add_another_team_member": "Add another team member", + "invite_team_members": "Invite team members", + "invite_members": "Invite Members", + "pending_invites": "Pending Invites", + "authenticated_domains": "Authenticated Domains", + "delete_domain_message": "Are you sure you want to delete this domain?", + "delete_domain": "Delete Domain", + "add_domain": "Add Domains", + "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it 
has been set up correctly" +} diff --git a/signoz/frontend/public/locales/en-GB/routes.json b/signoz/frontend/public/locales/en-GB/routes.json new file mode 100644 index 0000000..ede3f46 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/routes.json @@ -0,0 +1,15 @@ +{ + "general": "General", + "alert_channels": "Alert Channels", + "organization_settings": "Organization Settings", + "ingestion_settings": "Ingestion Settings", + "api_keys": "Access Tokens", + "my_settings": "My Settings", + "overview_metrics": "Overview Metrics", + "dbcall_metrics": "Database Calls", + "external_metrics": "External Calls", + "pipeline": "Pipeline", + "pipelines": "Pipelines", + "archives": "Archives", + "logs_to_metrics": "Logs To Metrics" +} diff --git a/signoz/frontend/public/locales/en-GB/rules.json b/signoz/frontend/public/locales/en-GB/rules.json new file mode 100644 index 0000000..9d55a0b --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/rules.json @@ -0,0 +1,86 @@ +{ + "preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.", + "preview_chart_threshold_label": "Threshold", + "placeholder_label_key_pair": "Click here to enter a label (key value pairs)", + "button_yes": "Yes", + "button_no": "No", + "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?", + "remove_label_success": "Labels cleared", + "alert_form_step1": "Step 1 - Define the metric", + "alert_form_step2": "Step 2 - Define Alert Conditions", + "alert_form_step3": "Step 3 - Alert Configuration", + "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries", + "confirm_save_title": "Save Changes", + "confirm_save_content_part1": "Your alert built with", + "confirm_save_content_part2": "query will be saved. Press OK to confirm.", + "unexpected_error": "Sorry, an unexpected error occurred. 
Please contact your admin", + "rule_created": "Rule created successfully", + "rule_edited": "Rule edited successfully", + "expression_missing": "expression is missing in {{where}}", + "metricname_missing": "metric name is missing in {{where}}", + "condition_required": "at least one metric condition is required", + "alertname_required": "alert name is required", + "promql_required": "promql expression is required when query format is set to PromQL", + "button_savechanges": "Save Rule", + "button_createrule": "Create Rule", + "button_returntorules": "Return to rules", + "button_cancelchanges": "Cancel", + "button_discard": "Discard", + "text_condition1": "Send a notification when", + "text_condition2": "the threshold", + "text_condition3": "during the last", + "option_5min": "5 mins", + "option_10min": "10 mins", + "option_15min": "15 mins", + "option_60min": "60 mins", + "option_4hours": "4 hours", + "option_24hours": "24 hours", + "field_threshold": "Alert Threshold", + "option_allthetimes": "all the times", + "option_atleastonce": "at least once", + "option_onaverage": "on average", + "option_intotal": "in total", + "option_above": "above", + "option_below": "below", + "option_equal": "is equal to", + "option_notequal": "not equal to", + "button_query": "Query", + "button_formula": "Formula", + "tab_qb": "Query Builder", + "tab_promql": "PromQL", + "title_confirm": "Confirm", + "button_ok": "Yes", + "button_cancel": "No", + "field_promql_expr": "PromQL Expression", + "field_alert_name": "Alert Name", + "field_alert_desc": "Alert Description", + "field_notification_channel": "Notification Channel", + "field_labels": "Labels", + "field_severity": "Severity", + "option_critical": "Critical", + "option_error": "Error", + "option_warning": "Warning", + "option_info": "Info", + "user_guide_headline": "Steps to create an Alert", + "user_guide_qb_step1": "Step 1 - Define the metric", + "user_guide_qb_step1a": "Choose a metric which you want to create an alert on", + 
"user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed", + "user_guide_qb_step1c": "Apply an aggregatiion function like COUNT, SUM, etc. or choose NOOP to plot the raw metric", + "user_guide_qb_step1d": "Create a formula based on Queries if needed", + "user_guide_qb_step2": "Step 2 - Define Alert Conditions", + "user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value", + "user_guide_qb_step2b": "Enter the Alert threshold", + "user_guide_qb_step3": "Step 3 -Alert Configuration", + "user_guide_qb_step3a": "Set alert severity, name and descriptions", + "user_guide_qb_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_pql_step1": "Step 1 - Define the metric", + "user_guide_pql_step1a": "Write a PromQL query for the metric", + "user_guide_pql_step1b": "Format the legends based on labels you want to highlight", + "user_guide_pql_step2": "Step 2 - Define Alert Conditions", + "user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_pql_step2b": "Enter the Alert threshold", + "user_guide_pql_step3": "Step 3 -Alert Configuration", + "user_guide_pql_step3a": "Set alert severity, name and descriptions", + "user_guide_pql_step3b": "Add tags to the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts" +} diff --git a/signoz/frontend/public/locales/en-GB/services.json b/signoz/frontend/public/locales/en-GB/services.json new file mode 100644 index 0000000..4c49847 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/services.json @@ -0,0 +1,3 @@ +{ + "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support." 
+} diff --git a/signoz/frontend/public/locales/en-GB/settings.json b/signoz/frontend/public/locales/en-GB/settings.json new file mode 100644 index 0000000..b5041b3 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/settings.json @@ -0,0 +1,5 @@ +{ + "current_password": "Current Password", + "new_password": "New Password", + "change_password": "Change Password" +} diff --git a/signoz/frontend/public/locales/en-GB/signup.json b/signoz/frontend/public/locales/en-GB/signup.json new file mode 100644 index 0000000..9e0d586 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/signup.json @@ -0,0 +1,18 @@ +{ + "label_email": "Email", + "placeholder_email": "name@yourcompany.com", + "label_password": "Password", + "label_confirm_password": "Confirm Password", + "label_firstname": "First Name", + "placeholder_firstname": "Your Name", + "label_orgname": "Organization Name", + "placeholder_orgname": "Your Company", + "prompt_keepme_posted": "Keep me updated on new SigNoz features", + "prompt_anonymise": "Anonymise my usage data. We collect data to measure product usage", + "failed_confirm_password": "Passwords don’t match. Please try again", + "unexpected_error": "Something went wrong", + "failed_to_initiate_login": "Signup completed but failed to initiate login", + "token_required": "Invite token is required for signup, please request one from your admin", + "button_get_started": "Get Started", + "prompt_admin_warning": "This will create an admin account. 
If you are not an admin, please ask your admin for an invite link" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en-GB/titles.json b/signoz/frontend/public/locales/en-GB/titles.json new file mode 100644 index 0000000..0eb98e9 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/titles.json @@ -0,0 +1,42 @@ +{ + "SIGN_UP": "SigNoz | Sign Up", + "LOGIN": "SigNoz | Login", + "GET_STARTED": "SigNoz | Get Started", + "SERVICE_METRICS": "SigNoz | Service Metrics", + "SERVICE_MAP": "SigNoz | Service Map", + "TRACE": "SigNoz | Trace", + "TRACE_DETAIL": "SigNoz | Trace Detail", + "TRACES_EXPLORER": "SigNoz | Traces Explorer", + "SETTINGS": "SigNoz | Settings", + "USAGE_EXPLORER": "SigNoz | Usage Explorer", + "APPLICATION": "SigNoz | Home", + "BILLING": "SigNoz | Billing", + "ALL_DASHBOARD": "SigNoz | All Dashboards", + "DASHBOARD": "SigNoz | Dashboard", + "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", + "EDIT_ALERTS": "SigNoz | Edit Alerts", + "LIST_ALL_ALERT": "SigNoz | All Alerts", + "ALERTS_NEW": "SigNoz | New Alert", + "ALL_CHANNELS": "SigNoz | All Channels", + "CHANNELS_NEW": "SigNoz | New Channel", + "CHANNELS_EDIT": "SigNoz | Edit Channel", + "ALL_ERROR": "SigNoz | All Errors", + "ERROR_DETAIL": "SigNoz | Error Detail", + "VERSION": "SigNoz | Version", + "MY_SETTINGS": "SigNoz | My Settings", + "ORG_SETTINGS": "SigNoz | Organization Settings", + "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", + "API_KEYS": "SigNoz | Access Tokens", + "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", + "UN_AUTHORIZED": "SigNoz | Unauthorized", + "NOT_FOUND": "SigNoz | Page Not Found", + "LOGS": "SigNoz | Logs", + "LOGS_EXPLORER": "SigNoz | Logs Explorer", + "LIVE_LOGS": "SigNoz | Live Logs", + "HOME_PAGE": "Open source Observability Platform | SigNoz", + "PASSWORD_RESET": "SigNoz | Password Reset", + "LIST_LICENSES": "SigNoz | List of Licenses", + "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", + "SUPPORT": "SigNoz | Support", + 
"DEFAULT": "Open source Observability Platform | SigNoz" +} diff --git a/signoz/frontend/public/locales/en-GB/trace.json b/signoz/frontend/public/locales/en-GB/trace.json new file mode 100644 index 0000000..c1e5519 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/trace.json @@ -0,0 +1,11 @@ +{ + "options_menu": { + "options": "Options", + "format": "Format", + "raw": "Raw", + "default": "Default", + "column": "Column", + "maxLines": "Max lines per Row", + "addColumn": "Add a column" + } +} diff --git a/signoz/frontend/public/locales/en-GB/traceDetails.json b/signoz/frontend/public/locales/en-GB/traceDetails.json new file mode 100644 index 0000000..bb61ff2 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/traceDetails.json @@ -0,0 +1,3 @@ +{ + "search_tags": "Search Tag Names" +} diff --git a/signoz/frontend/public/locales/en-GB/translation.json b/signoz/frontend/public/locales/en-GB/translation.json new file mode 100644 index 0000000..c1bd296 --- /dev/null +++ b/signoz/frontend/public/locales/en-GB/translation.json @@ -0,0 +1,19 @@ +{ + "monitor_signup": "Monitor your applications. Find what is causing issues.", + "version": "Version", + "latest_version": "Latest version", + "current_version": "Current version", + "release_notes": "Release Notes", + "read_how_to_upgrade": "Read instructions on how to upgrade", + "latest_version_signoz": "You are running the latest version of SigNoz.", + "stale_version": "You are on an older version and may be losing out on the latest features we have shipped. We recommend to upgrade to the latest version", + "oops_something_went_wrong_version": "Oops.. 
facing issues with fetching updated version information", + "n_a": "N/A", + "routes": { + "general": "General", + "alert_channels": "Alert Channels", + "all_errors": "All Exceptions", + "index_fields": "Index Fields", + "pipelines": "Pipelines" + } +} diff --git a/signoz/frontend/public/locales/en/alerts.json b/signoz/frontend/public/locales/en/alerts.json new file mode 100644 index 0000000..e7ed623 --- /dev/null +++ b/signoz/frontend/public/locales/en/alerts.json @@ -0,0 +1,123 @@ +{ + "target_missing": "Please enter a threshold to proceed", + "rule_test_fired": "Test notification sent successfully", + "no_alerts_found": "No alerts found during the evaluation. This happens when rule condition is unsatisfied. You may adjust the rule threshold and retry.", + "button_testrule": "Test Notification", + "label_channel_select": "Notification Channels", + "placeholder_channel_select": "select one or more channels", + "channel_select_tooltip": "Leave empty to send this alert on all the configured channels", + "preview_chart_unexpected_error": "An unexpeced error occurred updating the chart, please check your query.", + "preview_chart_threshold_label": "Threshold", + "placeholder_label_key_pair": "Click here to enter a label (key value pairs)", + "button_yes": "Yes", + "button_no": "No", + "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?", + "remove_label_success": "Labels cleared", + "alert_form_step1": "Step 1 - Define the metric", + "alert_form_step2": "Step 2 - Define Alert Conditions", + "alert_form_step3": "Step 3 - Alert Configuration", + "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries", + "confirm_save_title": "Save Changes", + "confirm_save_content_part1": "Your alert built with", + "confirm_save_content_part2": "query will be saved. Press OK to confirm.", + "unexpected_error": "Sorry, an unexpected error occurred. 
Please contact your admin", + "rule_created": "Rule created successfully", + "rule_edited": "Rule edited successfully", + "expression_missing": "expression is missing in {{where}}", + "metricname_missing": "metric name is missing in {{where}}", + "condition_required": "at least one metric condition is required", + "alertname_required": "alert name is required", + "promql_required": "promql expression is required when query format is set to PromQL", + "chquery_required": "query is required when query format is set to ClickHouse", + "button_savechanges": "Save Rule", + "button_createrule": "Create Rule", + "button_returntorules": "Return to rules", + "button_cancelchanges": "Cancel", + "button_discard": "Discard", + "text_condition1": "Send a notification when", + "text_condition2": "the threshold", + "text_condition3": "during the last", + "option_1min": "1 min", + "option_5min": "5 mins", + "option_10min": "10 mins", + "option_15min": "15 mins", + "option_30min": "30 mins", + "option_60min": "60 mins", + "option_3hours": "3 hours", + "option_4hours": "4 hours", + "option_6hours": "6 hours", + "option_12hours": "12 hours", + "option_24hours": "24 hours", + "field_threshold": "Alert Threshold", + "option_allthetimes": "all the times", + "option_atleastonce": "at least once", + "option_onaverage": "on average", + "option_intotal": "in total", + "option_above": "above", + "option_below": "below", + "option_equal": "is equal to", + "option_notequal": "not equal to", + "button_query": "Query", + "button_formula": "Formula", + "tab_qb": "Query Builder", + "tab_promql": "PromQL", + "tab_chquery": "ClickHouse Query", + "title_confirm": "Confirm", + "button_ok": "Yes", + "button_cancel": "No", + "field_promql_expr": "PromQL Expression", + "field_alert_name": "Alert Name", + "field_alert_desc": "Alert Description", + "field_notification_channel": "Notification Channel", + "field_labels": "Labels", + "field_severity": "Severity", + "option_critical": "Critical", + 
"option_error": "Error", + "option_warning": "Warning", + "option_info": "Info", + "user_guide_headline": "Steps to create an Alert", + "user_guide_qb_step1": "Step 1 - Define the metric", + "user_guide_qb_step1a": "Choose a metric which you want to create an alert on", + "user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed", + "user_guide_qb_step1c": "Apply an aggregation function like COUNT, SUM, etc. or choose NOOP to plot the raw metric", + "user_guide_qb_step1d": "Create a formula based on Queries if needed", + "user_guide_qb_step2": "Step 2 - Define Alert Conditions", + "user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value", + "user_guide_qb_step2b": "Enter the Alert threshold", + "user_guide_qb_step3": "Step 3 - Alert Configuration", + "user_guide_qb_step3a": "Set alert severity, name and descriptions", + "user_guide_qb_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_pql_step1": "Step 1 - Define the metric", + "user_guide_pql_step1a": "Write a PromQL query for the metric", + "user_guide_pql_step1b": "Format the legends based on labels you want to highlight", + "user_guide_pql_step2": "Step 2 - Define Alert Conditions", + "user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_pql_step2b": "Enter the Alert threshold", + "user_guide_pql_step3": "Step 3 - Alert Configuration", + "user_guide_pql_step3a": "Set alert severity, name and descriptions", + "user_guide_pql_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_ch_step1": "Step 1 - Define the metric", + "user_guide_ch_step1a": "Write a Clickhouse query for alert evaluation. 
Follow <0>this tutorial to learn about query format and supported vars.", + "user_guide_ch_step1b": "Format the legends based on labels you want to highlight in the preview chart", + "user_guide_ch_step2": "Step 2 - Define Alert Conditions", + "user_guide_ch_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_ch_step2b": "Enter the Alert threshold", + "user_guide_ch_step3": "Step 3 - Alert Configuration", + "user_guide_ch_step3a": "Set alert severity, name and descriptions", + "user_guide_ch_step3b": "Add tags to the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts", + "choose_alert_type": "Choose a type for the alert", + "metric_based_alert": "Metric based Alert", + "metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.", + "log_based_alert": "Log-based Alert", + "log_based_alert_desc": "Send a notification when a condition occurs in the logs data.", + "traces_based_alert": "Trace-based Alert", + "traces_based_alert_desc": "Send a notification when a condition occurs in the traces data.", + "exceptions_based_alert": "Exceptions-based Alert", + "exceptions_based_alert_desc": "Send a notification when a condition occurs in the exceptions data.", + "field_unit": "Threshold unit", + "text_alert_on_absent": "Send a notification if data is missing for", + "text_alert_frequency": "Run alert every", + "text_for": "minutes", + "selected_query_placeholder": "Select query" +} diff --git a/signoz/frontend/public/locales/en/apiKeys.json b/signoz/frontend/public/locales/en/apiKeys.json new file mode 100644 index 0000000..fb86610 --- /dev/null +++ b/signoz/frontend/public/locales/en/apiKeys.json @@ -0,0 +1,3 @@ +{ + "delete_confirm_message": "Are you sure you want to delete {{keyName}} token? Deleting a token is irreversible and cannot be undone." 
+} diff --git a/signoz/frontend/public/locales/en/billings.json b/signoz/frontend/public/locales/en/billings.json new file mode 100644 index 0000000..fb706e0 --- /dev/null +++ b/signoz/frontend/public/locales/en/billings.json @@ -0,0 +1,14 @@ +{ + "days_remaining": "days remaining in your billing period.", + "billing": "Billing", + "manage_billing_and_costs": "Manage your billing information, invoices, and monitor costs.", + "enterprise_cloud": "Enterprise Cloud", + "enterprise": "Enterprise", + "card_details_recieved_and_billing_info": "We have received your card details, your billing will only start after the end of your free trial period.", + "upgrade_plan": "Upgrade Plan", + "manage_billing": "Manage Billing", + "upgrade_now_text": "Upgrade now to have uninterrupted access", + "billing_start_info": "Your billing will start only after the trial period", + "checkout_plans": "Check out features in paid plans", + "here": "here" +} diff --git a/signoz/frontend/public/locales/en/channels.json b/signoz/frontend/public/locales/en/channels.json new file mode 100644 index 0000000..0d9387d --- /dev/null +++ b/signoz/frontend/public/locales/en/channels.json @@ -0,0 +1,75 @@ +{ + "channel_delete_unexp_error": "Something went wrong", + "channel_delete_success": "Channel Deleted Successfully", + "column_channel_name": "Name", + "column_channel_type": "Type", + "column_channel_action": "Action", + "column_channel_edit": "Edit", + "button_new_channel": "New Alert Channel", + "tooltip_notification_channels": "More details on how to set up notification channels", + "sending_channels_note": "The alerts will be sent to all the configured channels.", + "loading_channels_message": "Loading Channels...", + "page_title_create": "New Notification Channels", + "page_title_edit": "Edit Notification Channels", + "button_save_channel": "Save", + "button_test_channel": "Test", + "button_return": "Back", + "field_channel_name": "Name", + "field_send_resolved": "Send resolved alerts", + 
"field_channel_type": "Type", + "field_webhook_url": "Webhook URL", + "field_slack_recipient": "Recipient", + "field_slack_title": "Title", + "field_slack_description": "Description", + "field_opsgenie_api_key": "API Key", + "field_opsgenie_description": "Description", + "placeholder_opsgenie_description": "Description", + "help_email_to": "Email address(es) to send alerts to (comma separated)", + "field_email_to": "To", + "placeholder_email_to": "To", + "help_email_html": "Send email in html format", + "field_email_html": "Email body template", + "placeholder_email_html": "Email body template", + "field_webhook_username": "User Name (optional)", + "field_webhook_password": "Password (optional)", + "field_pager_routing_key": "Routing Key", + "field_pager_description": "Description", + "field_pager_severity": "Severity", + "field_pager_details": "Additional Information", + "field_pager_component": "Component", + "field_pager_group": "Group", + "field_pager_class": "Class", + "field_pager_client": "Client", + "field_pager_client_url": "Client URL", + "field_opsgenie_message": "Message", + "field_opsgenie_priority": "Priority", + "placeholder_slack_description": "Description", + "placeholder_pager_description": "Description", + "placeholder_opsgenie_message": "Message", + "placeholder_opsgenie_priority": "Priority", + "help_pager_client": "Shows up as event source in Pagerduty", + "help_pager_client_url": "Shows up as event source link in Pagerduty", + "help_pager_class": "The class/type of the event", + "help_pager_details": "Specify a key-value format (must be a valid json)", + "help_pager_group": "A cluster or grouping of sources", + "help_pager_component": "The part or component of the affected system that is broken", + "help_pager_severity": "Severity of the incident, must be one of the following: 'critical', 'warning', 'error' or 'info'", + "help_webhook_username": "Leave empty for bearer auth or when authentication is not necessary.", + 
"help_webhook_password": "Specify a password or bearer token", + "help_pager_description": "Shows up as description in pagerduty", + "help_opsgenie_message": "Shows up as message in opsgenie", + "help_opsgenie_priority": "Priority of the incident", + "help_opsgenie_description": "Shows up as description in opsgenie", + "channel_creation_done": "Successfully created the channel", + "channel_creation_failed": "An unexpected error occurred while creating this channel", + "channel_edit_done": "Channels Edited Successfully", + "channel_edit_failed": "An unexpected error occurred while updating this channel", + "selected_channel_invalid": "Channel type selected is invalid", + "username_no_password": "A Password must be provided with user name", + "test_unsupported": "Sorry, this channel type does not support test yet", + "channel_test_done": "An alert has been sent to this channel", + "channel_test_failed": "Failed to send a test message to this channel, please confirm that the parameters are set correctly", + "channel_test_unexpected": "An unexpected error occurred while sending a message to this channel, please try again", + "webhook_url_required": "Webhook URL is mandatory", + "slack_channel_help": "Specify channel or user, use #channel-name, @username (has to be all lowercase, no whitespace)" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en/common.json b/signoz/frontend/public/locales/en/common.json new file mode 100644 index 0000000..72d9f13 --- /dev/null +++ b/signoz/frontend/public/locales/en/common.json @@ -0,0 +1,11 @@ +{ + "something_went_wrong": "Something went wrong", + "already_logged_in": "Already Logged In", + "success": "Success", + "cancel": "Cancel", + "share": "Share", + "save": "Save", + "edit": "Edit", + "logged_in": "Logged In", + "pending_data_placeholder": "Just a bit of patience, just a little bit’s enough ⎯ we’re getting your {{dataSource}}!" 
+} diff --git a/signoz/frontend/public/locales/en/dashboard.json b/signoz/frontend/public/locales/en/dashboard.json new file mode 100644 index 0000000..d2e9023 --- /dev/null +++ b/signoz/frontend/public/locales/en/dashboard.json @@ -0,0 +1,33 @@ +{ + "create_dashboard": "Create Dashboard", + "import_json": "Import Dashboard JSON", + "import_grafana_json": "Import Grafana JSON", + "copy_to_clipboard": "Copy To ClipBoard", + "download_json": "Download JSON", + "view_json": "View JSON", + "export_dashboard": "Export this dashboard.", + "upload_json_file": "Upload JSON file", + "paste_json_below": "Paste JSON below", + "error_upload_json": "Invalid JSON", + "import_and_next": "Import and Next", + "import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file", + "error_loading_json": "Error loading JSON file", + "empty_json_not_allowed": "Empty JSON is not allowed", + "new_dashboard_title": "Sample Title", + "layout_saved_successfully": "Layout saved successfully", + "add_panel": "Add Panel", + "add_row": "Add Row", + "save_layout": "Save Layout", + "full_view": "Full Screen View", + "variable_updated_successfully": "Variable updated successfully", + "error_while_updating_variable": "Error while updating variable", + "dashboard_has_been_updated": "Dashboard has been updated", + "do_you_want_to_refresh_the_dashboard": "Do you want to refresh the dashboard?", + "locked_dashboard_delete_tooltip_admin_author": "Dashboard is locked. Please unlock the dashboard to enable delete.", + "locked_dashboard_delete_tooltip_editor": "Dashboard is locked. Please contact admin to delete the dashboard.", + "delete_dashboard_success": "{{name}} dashboard deleted successfully", + "dashboard_unsave_changes": "There are unsaved changes in the Query builder, please stage and run the query or the changes will be lost. Press OK to discard.", + "dashboard_save_changes": "Your graph built with {{queryTag}} query will be saved. 
Press OK to confirm.", + "your_graph_build_with": "Your graph built with", + "dashboard_ok_confirm": "query will be saved. Press OK to confirm." +} diff --git a/signoz/frontend/public/locales/en/errorDetails.json b/signoz/frontend/public/locales/en/errorDetails.json new file mode 100644 index 0000000..da68ed4 --- /dev/null +++ b/signoz/frontend/public/locales/en/errorDetails.json @@ -0,0 +1,9 @@ +{ + "see_trace_graph": "See what happened before and after this error in a trace graph", + "see_error_in_trace_graph": "See the error in trace graph", + "stack_trace": "Stacktrace", + "older": "Older", + "newer": "Newer", + "something_went_wrong": "Oops !!! Something went wrong", + "contact_if_issue_exists": "Don't worry, our team is here to help. Please contact support if the issue persists." +} diff --git a/signoz/frontend/public/locales/en/explorer.json b/signoz/frontend/public/locales/en/explorer.json new file mode 100644 index 0000000..65e4fc2 --- /dev/null +++ b/signoz/frontend/public/locales/en/explorer.json @@ -0,0 +1,4 @@ +{ + "name_of_the_view": "Name of the view", + "delete_confirm_message": "Are you sure you want to delete {{viewName}} view? Deleting a view is irreversible and cannot be undone." +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en/generalSettings.json b/signoz/frontend/public/locales/en/generalSettings.json new file mode 100644 index 0000000..1519873 --- /dev/null +++ b/signoz/frontend/public/locales/en/generalSettings.json @@ -0,0 +1,21 @@ +{ + "total_retention_period": "Total Retention Period", + "move_to_s3": "Move to S3\n(should be lower than total retention period)", + "status_message": { + "success": "Your last call to change retention period to {{total_retention}} {{s3_part}} was successful.", + "failed": "Your last call to change retention period to {{total_retention}} {{s3_part}} failed. Please try again.", + "pending": "Your last call to change retention period to {{total_retention}} {{s3_part}} is pending. 
This may take some time.", + "s3_part": "and S3 to {{s3_retention}}" + }, + "retention_save_button": { + "pending": "Updating {{name}} retention period", + "success": "Save" + }, + "retention_request_race_condition": "Your request to change retention period has failed, as another request is still in process.", + "retention_error_message": "There was an issue in changing the retention period for {{name}}. Please try again or reach out to support@signoz.io", + "retention_failed_message": "There was an issue in changing the retention period. Please try again or reach out to support@signoz.io", + "retention_comparison_error": "Total retention period for {{name}} can’t be lower or equal to the period after which data is moved to s3.", + "retention_null_value_error": "Retention Period for {{name}} is not set yet. Please set by choosing below", + "retention_confirmation": "Are you sure you want to change the retention period?", + "retention_confirmation_description": "This will change the amount of storage needed for saving {{name}}." +} diff --git a/signoz/frontend/public/locales/en/ingestionKeys.json b/signoz/frontend/public/locales/en/ingestionKeys.json new file mode 100644 index 0000000..58ebf8a --- /dev/null +++ b/signoz/frontend/public/locales/en/ingestionKeys.json @@ -0,0 +1,4 @@ +{ + "delete_confirm_message": "Are you sure you want to delete {{keyName}}? Deleting an ingestion key is irreversible and cannot be undone.", + "delete_limit_confirm_message": "Are you sure you want to delete {{limit_name}} limit for ingestion key {{keyName}}?" 
+} diff --git a/signoz/frontend/public/locales/en/licenses.json b/signoz/frontend/public/locales/en/licenses.json new file mode 100644 index 0000000..ed7c3ee --- /dev/null +++ b/signoz/frontend/public/locales/en/licenses.json @@ -0,0 +1,13 @@ +{ + "column_license_key": "License Key", + "column_valid_from": "Valid From", + "column_valid_until": "Valid Until", + "column_license_status": "Status", + "button_apply": "Apply", + "placeholder_license_key": "Enter a License Key", + "tab_current_license": "Current License", + "tab_license_history": "History", + "loading_licenses": "Loading licenses...", + "enter_license_key": "Please enter a license key", + "license_applied": "License applied successfully" +} diff --git a/signoz/frontend/public/locales/en/login.json b/signoz/frontend/public/locales/en/login.json new file mode 100644 index 0000000..84c9e1d --- /dev/null +++ b/signoz/frontend/public/locales/en/login.json @@ -0,0 +1,22 @@ +{ + "label_email": "Email", + "placeholder_email": "name@yourcompany.com", + "label_password": "Password", + "button_initiate_login": "Next", + "button_login": "Login", + "login_page_title": "Login with SigNoz", + "login_with_sso": "Login with SSO", + "login_with_pwd": "Login with password", + "forgot_password": "Forgot password?", + "create_an_account": "Create an account", + "prompt_if_admin": "If you are admin,", + "prompt_create_account": "If you are setting up SigNoz for the first time,", + "prompt_no_account": "Don't have an account? Contact your admin to send you an invite link.", + "prompt_forgot_password": "Ask your admin to reset your password and send you a new invite link", + "prompt_on_sso_error": "Are you trying to resolve SSO configuration issue?", + "unexpected_error": "Sorry, something went wrong", + "failed_to_login": "sorry, failed to login", + "invalid_email": "Please enter a valid email address", + "invalid_account": "This account does not exist. 
To create a new account, contact your admin to get an invite link", + "invalid_config": "Invalid configuration detected, please contact your administrator" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en/logs.json b/signoz/frontend/public/locales/en/logs.json new file mode 100644 index 0000000..804f66f --- /dev/null +++ b/signoz/frontend/public/locales/en/logs.json @@ -0,0 +1 @@ +{ "fetching_log_lines": "Fetching log lines" } diff --git a/signoz/frontend/public/locales/en/onboarding.json b/signoz/frontend/public/locales/en/onboarding.json new file mode 100644 index 0000000..5732826 --- /dev/null +++ b/signoz/frontend/public/locales/en/onboarding.json @@ -0,0 +1,8 @@ +{ + "invite_user": "Invite your teammates", + "invite": "Invite", + "skip": "Skip", + "invite_user_helper_text": "Not the right person to get started? No worries! Invite someone who can.", + "select_use_case": "Select a use-case to get started", + "get_started": "Get Started" +} diff --git a/signoz/frontend/public/locales/en/organizationsettings.json b/signoz/frontend/public/locales/en/organizationsettings.json new file mode 100644 index 0000000..74654d9 --- /dev/null +++ b/signoz/frontend/public/locales/en/organizationsettings.json @@ -0,0 +1,18 @@ +{ + "display_name": "Display Name", + "signoz": "SigNoz", + "email_address": "Email address", + "name_optional": "Name (optional)", + "role": "Role", + "email_placeholder": "john@signoz.io", + "name_placeholder": "John", + "add_another_team_member": "Add another team member", + "invite_team_members": "Invite team members", + "invite_members": "Invite Members", + "pending_invites": "Pending Invites", + "authenticated_domains": "Authenticated Domains", + "delete_domain_message": "Are you sure you want to delete this domain?", + "delete_domain": "Delete Domain", + "add_domain": "Add Domains", + "saml_settings": "Your SAML settings have been saved, please login from incognito window to confirm that it has been set up correctly" +} 
diff --git a/signoz/frontend/public/locales/en/pipeline.json b/signoz/frontend/public/locales/en/pipeline.json new file mode 100644 index 0000000..d4b95bb --- /dev/null +++ b/signoz/frontend/public/locales/en/pipeline.json @@ -0,0 +1,46 @@ +{ + "delete": "Delete", + "filter": "Filter", + "update": "Update", + "create": "Create", + "reorder": "Reorder", + "cancel": "Cancel", + "learn_more": "Learn more about pipelines", + "reorder_pipeline": "Do you want to reorder pipeline?", + "reorder_pipeline_description": "Logs are processed sequentially in processors and pipelines. Reordering it may change how data is processed by them.", + "delete_pipeline": "Do you want to delete pipeline", + "delete_pipeline_description": "Logs are processed sequentially in processors and pipelines. Deleting a pipeline may change content of data processed by other pipelines & processors", + "add_new_pipeline": "Add a New Pipeline", + "new_pipeline": "New Pipeline", + "enter_edit_mode": "Enter Edit Mode", + "save_configuration": "Save Configuration", + "edit_pipeline": "Edit Pipeline", + "create_pipeline": "Create New Pipeline", + "add_new_processor": "Add Processor", + "edit_processor": "Edit Processor", + "create_processor": "Create New Processor", + "processor_type": "Select Processor Type", + "reorder_processor": "Do you want to reorder processor?", + "reorder_processor_description": "Logs are processed sequentially in processors. Reordering it may change how data is processed by them.", + "delete_processor": "Do you want to delete processor", + "delete_processor_description": "Logs are processed sequentially in processors. Deleting a processor may change content of data processed by other processors", + "search_pipeline_placeholder": "Filter Pipelines", + "pipeline_name_placeholder": "Name", + "pipeline_filter_placeholder": "Filter for selecting logs to be processed by this pipeline. 
Example: service_name = billing", + "pipeline_tags_placeholder": "Tags", + "pipeline_description_placeholder": "Enter description for your pipeline", + "processor_name_placeholder": "Name", + "processor_regex_placeholder": "Regex", + "processor_parsefrom_placeholder": "Parse From", + "processor_parseto_placeholder": "Parse To", + "processor_onerror_placeholder": "on Error", + "processor_pattern_placeholder": "Pattern", + "processor_field_placeholder": "Field", + "processor_value_placeholder": "Value", + "processor_description_placeholder": "example rule: %{word:first}", + "processor_trace_id_placeholder": "Parse Trace ID from", + "processor_span_id_placeholder": "Parse Span ID from", + "processor_trace_flags_placeholder": "Parse Trace flags from", + "processor_from_placeholder": "From", + "processor_to_placeholder": "To" +} diff --git a/signoz/frontend/public/locales/en/routes.json b/signoz/frontend/public/locales/en/routes.json new file mode 100644 index 0000000..ede3f46 --- /dev/null +++ b/signoz/frontend/public/locales/en/routes.json @@ -0,0 +1,15 @@ +{ + "general": "General", + "alert_channels": "Alert Channels", + "organization_settings": "Organization Settings", + "ingestion_settings": "Ingestion Settings", + "api_keys": "Access Tokens", + "my_settings": "My Settings", + "overview_metrics": "Overview Metrics", + "dbcall_metrics": "Database Calls", + "external_metrics": "External Calls", + "pipeline": "Pipeline", + "pipelines": "Pipelines", + "archives": "Archives", + "logs_to_metrics": "Logs To Metrics" +} diff --git a/signoz/frontend/public/locales/en/rules.json b/signoz/frontend/public/locales/en/rules.json new file mode 100644 index 0000000..9d55a0b --- /dev/null +++ b/signoz/frontend/public/locales/en/rules.json @@ -0,0 +1,86 @@ +{ + "preview_chart_unexpected_error": "An unexpected error occurred updating the chart, please check your query.", + "preview_chart_threshold_label": "Threshold", + "placeholder_label_key_pair": "Click here to enter a label (key 
value pairs)", + "button_yes": "Yes", + "button_no": "No", + "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?", + "remove_label_success": "Labels cleared", + "alert_form_step1": "Step 1 - Define the metric", + "alert_form_step2": "Step 2 - Define Alert Conditions", + "alert_form_step3": "Step 3 - Alert Configuration", + "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries", + "confirm_save_title": "Save Changes", + "confirm_save_content_part1": "Your alert built with", + "confirm_save_content_part2": "query will be saved. Press OK to confirm.", + "unexpected_error": "Sorry, an unexpected error occurred. Please contact your admin", + "rule_created": "Rule created successfully", + "rule_edited": "Rule edited successfully", + "expression_missing": "expression is missing in {{where}}", + "metricname_missing": "metric name is missing in {{where}}", + "condition_required": "at least one metric condition is required", + "alertname_required": "alert name is required", + "promql_required": "promql expression is required when query format is set to PromQL", + "button_savechanges": "Save Rule", + "button_createrule": "Create Rule", + "button_returntorules": "Return to rules", + "button_cancelchanges": "Cancel", + "button_discard": "Discard", + "text_condition1": "Send a notification when", + "text_condition2": "the threshold", + "text_condition3": "during the last", + "option_5min": "5 mins", + "option_10min": "10 mins", + "option_15min": "15 mins", + "option_60min": "60 mins", + "option_4hours": "4 hours", + "option_24hours": "24 hours", + "field_threshold": "Alert Threshold", + "option_allthetimes": "all the times", + "option_atleastonce": "at least once", + "option_onaverage": "on average", + "option_intotal": "in total", + "option_above": "above", + "option_below": "below", + "option_equal": "is equal to", + "option_notequal": "not equal to", + "button_query": "Query", + "button_formula": "Formula", + 
"tab_qb": "Query Builder", + "tab_promql": "PromQL", + "title_confirm": "Confirm", + "button_ok": "Yes", + "button_cancel": "No", + "field_promql_expr": "PromQL Expression", + "field_alert_name": "Alert Name", + "field_alert_desc": "Alert Description", + "field_notification_channel": "Notification Channel", + "field_labels": "Labels", + "field_severity": "Severity", + "option_critical": "Critical", + "option_error": "Error", + "option_warning": "Warning", + "option_info": "Info", + "user_guide_headline": "Steps to create an Alert", + "user_guide_qb_step1": "Step 1 - Define the metric", + "user_guide_qb_step1a": "Choose a metric which you want to create an alert on", + "user_guide_qb_step1b": "Filter it based on WHERE field or GROUPBY if needed", + "user_guide_qb_step1c": "Apply an aggregation function like COUNT, SUM, etc. or choose NOOP to plot the raw metric", + "user_guide_qb_step1d": "Create a formula based on Queries if needed", + "user_guide_qb_step2": "Step 2 - Define Alert Conditions", + "user_guide_qb_step2a": "Select the evaluation interval, threshold type and whether you want to alert above/below a value", + "user_guide_qb_step2b": "Enter the Alert threshold", + "user_guide_qb_step3": "Step 3 - Alert Configuration", + "user_guide_qb_step3a": "Set alert severity, name and descriptions", + "user_guide_qb_step3b": "Add tags to the alert in the Label field if needed", + "user_guide_pql_step1": "Step 1 - Define the metric", + "user_guide_pql_step1a": "Write a PromQL query for the metric", + "user_guide_pql_step1b": "Format the legends based on labels you want to highlight", + "user_guide_pql_step2": "Step 2 - Define Alert Conditions", + "user_guide_pql_step2a": "Select the threshold type and whether you want to alert above/below a value", + "user_guide_pql_step2b": "Enter the Alert threshold", + "user_guide_pql_step3": "Step 3 - Alert Configuration", + "user_guide_pql_step3a": "Set alert severity, name and descriptions", + "user_guide_pql_step3b": "Add tags to 
the alert in the Label field if needed", + "user_tooltip_more_help": "More details on how to create alerts" +} diff --git a/signoz/frontend/public/locales/en/services.json b/signoz/frontend/public/locales/en/services.json new file mode 100644 index 0000000..4c49847 --- /dev/null +++ b/signoz/frontend/public/locales/en/services.json @@ -0,0 +1,3 @@ +{ + "rps_over_100": "You are sending data at more than 100 RPS, your ingestion may be rate limited. Please reach out to us via Intercom support." +} diff --git a/signoz/frontend/public/locales/en/settings.json b/signoz/frontend/public/locales/en/settings.json new file mode 100644 index 0000000..94a4f71 --- /dev/null +++ b/signoz/frontend/public/locales/en/settings.json @@ -0,0 +1,6 @@ +{ + "current_password": "Current Password", + "new_password": "New Password", + "change_password": "Change Password", + "input_password": "input password" +} diff --git a/signoz/frontend/public/locales/en/signup.json b/signoz/frontend/public/locales/en/signup.json new file mode 100644 index 0000000..9e0d586 --- /dev/null +++ b/signoz/frontend/public/locales/en/signup.json @@ -0,0 +1,18 @@ +{ + "label_email": "Email", + "placeholder_email": "name@yourcompany.com", + "label_password": "Password", + "label_confirm_password": "Confirm Password", + "label_firstname": "First Name", + "placeholder_firstname": "Your Name", + "label_orgname": "Organization Name", + "placeholder_orgname": "Your Company", + "prompt_keepme_posted": "Keep me updated on new SigNoz features", + "prompt_anonymise": "Anonymise my usage data. We collect data to measure product usage", + "failed_confirm_password": "Passwords don’t match. Please try again", + "unexpected_error": "Something went wrong", + "failed_to_initiate_login": "Signup completed but failed to initiate login", + "token_required": "Invite token is required for signup, please request one from your admin", + "button_get_started": "Get Started", + "prompt_admin_warning": "This will create an admin account. 
If you are not an admin, please ask your admin for an invite link" +} \ No newline at end of file diff --git a/signoz/frontend/public/locales/en/titles.json b/signoz/frontend/public/locales/en/titles.json new file mode 100644 index 0000000..f77bf0e --- /dev/null +++ b/signoz/frontend/public/locales/en/titles.json @@ -0,0 +1,53 @@ +{ + "SIGN_UP": "SigNoz | Sign Up", + "LOGIN": "SigNoz | Login", + "SERVICE_METRICS": "SigNoz | Service Metrics", + "SERVICE_MAP": "SigNoz | Service Map", + "GET_STARTED": "SigNoz | Get Started", + "GET_STARTED_APPLICATION_MONITORING": "SigNoz | Get Started | APM", + "GET_STARTED_LOGS_MANAGEMENT": "SigNoz | Get Started | Logs", + "GET_STARTED_INFRASTRUCTURE_MONITORING": "SigNoz | Get Started | Infrastructure", + "GET_STARTED_AWS_MONITORING": "SigNoz | Get Started | AWS", + "GET_STARTED_AZURE_MONITORING": "SigNoz | Get Started | AZURE", + "TRACE": "SigNoz | Trace", + "TRACE_DETAIL": "SigNoz | Trace Detail", + "TRACES_EXPLORER": "SigNoz | Traces Explorer", + "SETTINGS": "SigNoz | Settings", + "USAGE_EXPLORER": "SigNoz | Usage Explorer", + "APPLICATION": "SigNoz | Home", + "BILLING": "SigNoz | Billing", + "ALL_DASHBOARD": "SigNoz | All Dashboards", + "DASHBOARD": "SigNoz | Dashboard", + "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", + "EDIT_ALERTS": "SigNoz | Edit Alerts", + "LIST_ALL_ALERT": "SigNoz | All Alerts", + "ALERTS_NEW": "SigNoz | New Alert", + "ALL_CHANNELS": "SigNoz | All Channels", + "CHANNELS_NEW": "SigNoz | New Channel", + "CHANNELS_EDIT": "SigNoz | Edit Channel", + "ALL_ERROR": "SigNoz | All Errors", + "ERROR_DETAIL": "SigNoz | Error Detail", + "VERSION": "SigNoz | Version", + "MY_SETTINGS": "SigNoz | My Settings", + "ORG_SETTINGS": "SigNoz | Organization Settings", + "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", + "API_KEYS": "SigNoz | Access Tokens", + "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", + "UN_AUTHORIZED": "SigNoz | Unauthorized", + "NOT_FOUND": "SigNoz | Page Not Found", + "LOGS": "SigNoz | 
Logs", + "LOGS_EXPLORER": "SigNoz | Logs Explorer", + "OLD_LOGS_EXPLORER": "SigNoz | Old Logs Explorer", + "LIVE_LOGS": "SigNoz | Live Logs", + "LOGS_PIPELINES": "SigNoz | Logs Pipelines", + "HOME_PAGE": "Open source Observability Platform | SigNoz", + "PASSWORD_RESET": "SigNoz | Password Reset", + "LIST_LICENSES": "SigNoz | List of Licenses", + "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", + "SUPPORT": "SigNoz | Support", + "LOGS_SAVE_VIEWS": "SigNoz | Logs Saved Views", + "TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views", + "DEFAULT": "Open source Observability Platform | SigNoz", + "SHORTCUTS": "SigNoz | Shortcuts", + "INTEGRATIONS": "SigNoz | Integrations" +} diff --git a/signoz/frontend/public/locales/en/trace.json b/signoz/frontend/public/locales/en/trace.json new file mode 100644 index 0000000..c1e5519 --- /dev/null +++ b/signoz/frontend/public/locales/en/trace.json @@ -0,0 +1,11 @@ +{ + "options_menu": { + "options": "Options", + "format": "Format", + "raw": "Raw", + "default": "Default", + "column": "Column", + "maxLines": "Max lines per Row", + "addColumn": "Add a column" + } +} diff --git a/signoz/frontend/public/locales/en/traceDetails.json b/signoz/frontend/public/locales/en/traceDetails.json new file mode 100644 index 0000000..bb61ff2 --- /dev/null +++ b/signoz/frontend/public/locales/en/traceDetails.json @@ -0,0 +1,3 @@ +{ + "search_tags": "Search Tag Names" +} diff --git a/signoz/frontend/public/locales/en/translation.json b/signoz/frontend/public/locales/en/translation.json new file mode 100644 index 0000000..08b795d --- /dev/null +++ b/signoz/frontend/public/locales/en/translation.json @@ -0,0 +1,19 @@ +{ + "monitor_signup": "Monitor your applications. 
Find what is causing issues.", + "version": "Version", + "latest_version": "Latest version", + "current_version": "Current version", + "release_notes": "Release Notes", + "read_how_to_upgrade": "Read instructions on how to upgrade", + "latest_version_signoz": "You are running the latest version of SigNoz.", + "stale_version": "You are on an older version and may be missing out on the latest features we have shipped. We recommend to upgrade to the latest version", + "oops_something_went_wrong_version": "Oops.. facing issues with fetching updated version information", + "n_a": "N/A", + "routes": { + "general": "General", + "alert_channels": "Alert Channels", + "all_errors": "All Exceptions", + "index_fields": "Index Fields", + "pipelines": "Pipelines" + } +} diff --git a/signoz/frontend/public/locales/en/valueGraph.json b/signoz/frontend/public/locales/en/valueGraph.json new file mode 100644 index 0000000..76aa50e --- /dev/null +++ b/signoz/frontend/public/locales/en/valueGraph.json @@ -0,0 +1,3 @@ +{ + "this_value_satisfies_multiple_thresholds": "This value satisfies multiple thresholds." 
+} \ No newline at end of file diff --git a/signoz/frontend/public/manifest.json b/signoz/frontend/public/manifest.json new file mode 100644 index 0000000..7c86642 --- /dev/null +++ b/signoz/frontend/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/signoz/frontend/public/robots.txt b/signoz/frontend/public/robots.txt new file mode 100644 index 0000000..e9e57dc --- /dev/null +++ b/signoz/frontend/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/signoz/frontend/public/signoz-signup.svg b/signoz/frontend/public/signoz-signup.svg new file mode 100644 index 0000000..67c45b1 --- /dev/null +++ b/signoz/frontend/public/signoz-signup.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/signoz/frontend/public/signoz.svg b/signoz/frontend/public/signoz.svg new file mode 100644 index 0000000..cdfe945 --- /dev/null +++ b/signoz/frontend/public/signoz.svg @@ -0,0 +1,4 @@ + + + + diff --git a/signoz/frontend/scripts/typecheck-staged.sh b/signoz/frontend/scripts/typecheck-staged.sh new file mode 100644 index 0000000..0990e81 --- /dev/null +++ b/signoz/frontend/scripts/typecheck-staged.sh @@ -0,0 +1,25 @@ +files=""; + +# lint-staged will pass all files in $1 $2 $3 etc. iterate and concat. 
+for var in "$@" +do + files="$files \"$var\"," +done + +# create temporary tsconfig which includes only passed files +str="{ + \"extends\": \"./tsconfig.json\", + \"include\": [ \"src/typings/**/*.ts\",\"src/**/*.d.ts\", \"./babel.config.js\", \"./jest.config.ts\", \"./.eslintrc.js\",\"./__mocks__\",\"./conf/default.conf\",\"./public\",\"./tests\",\"./playwright.config.ts\",\"./commitlint.config.ts\",\"./webpack.config.js\",\"./webpack.config.prod.js\",\"./jest.setup.ts\",\"./**/*.d.ts\",$files] +}" +echo $str > tsconfig.tmp + +# run typecheck using temp config +tsc -p ./tsconfig.tmp + +# capture exit code of tsc +code=$? + +# delete temp config +rm ./tsconfig.tmp + +exit $code diff --git a/signoz/frontend/sonar-project.properties b/signoz/frontend/sonar-project.properties new file mode 100644 index 0000000..c8676c9 --- /dev/null +++ b/signoz/frontend/sonar-project.properties @@ -0,0 +1,6 @@ +sonar.organization=signoz +sonar.projectKey=SigNoz_signoz + +# relative paths to source directories. 
More details and properties are described +# in https://sonarcloud.io/documentation/project-administration/narrowing-the-focus/ +sonar.sources=./src \ No newline at end of file diff --git a/signoz/frontend/src/AppRoutes/Private.tsx b/signoz/frontend/src/AppRoutes/Private.tsx new file mode 100644 index 0000000..669def6 --- /dev/null +++ b/signoz/frontend/src/AppRoutes/Private.tsx @@ -0,0 +1,231 @@ +/* eslint-disable react-hooks/exhaustive-deps */ +import getLocalStorageApi from 'api/browser/localstorage/get'; +import loginApi from 'api/user/login'; +import { Logout } from 'api/utils'; +import Spinner from 'components/Spinner'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import ROUTES from 'constants/routes'; +import useLicense from 'hooks/useLicense'; +import { useNotifications } from 'hooks/useNotifications'; +import history from 'lib/history'; +import { ReactChild, useEffect, useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useDispatch, useSelector } from 'react-redux'; +import { matchPath, Redirect, useLocation } from 'react-router-dom'; +import { Dispatch } from 'redux'; +import { AppState } from 'store/reducers'; +import { getInitialUserTokenRefreshToken } from 'store/utils'; +import AppActions from 'types/actions'; +import { UPDATE_USER_IS_FETCH } from 'types/actions/app'; +import AppReducer from 'types/reducer/app'; +import { routePermission } from 'utils/permission'; + +import routes, { + LIST_LICENSES, + oldNewRoutesMapping, + oldRoutes, +} from './routes'; +import afterLogin from './utils'; + +function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { + const location = useLocation(); + const { pathname } = location; + + const mapRoutes = useMemo( + () => + new Map( + [...routes, LIST_LICENSES].map((e) => { + const currentPath = matchPath(pathname, { + path: e.path, + }); + return [currentPath === null ? 
null : 'current', e]; + }), + ), + [pathname], + ); + + const { + data: licensesData, + isFetching: isFetchingLicensesData, + } = useLicense(); + + const { + isUserFetching, + isUserFetchingError, + isLoggedIn: isLoggedInState, + } = useSelector((state) => state.app); + + const { t } = useTranslation(['common']); + const localStorageUserAuthToken = getInitialUserTokenRefreshToken(); + + const dispatch = useDispatch>(); + + const { notifications } = useNotifications(); + + const currentRoute = mapRoutes.get('current'); + + const isOldRoute = oldRoutes.indexOf(pathname) > -1; + + const isLocalStorageLoggedIn = + getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true'; + + const navigateToLoginIfNotLoggedIn = (isLoggedIn = isLoggedInState): void => { + dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + + if (!isLoggedIn) { + history.push(ROUTES.LOGIN); + } + }; + + const handleUserLoginIfTokenPresent = async ( + key: keyof typeof ROUTES, + ): Promise => { + if (localStorageUserAuthToken?.refreshJwt) { + // localstorage token is present + + // renew web access token + const response = await loginApi({ + refreshToken: localStorageUserAuthToken?.refreshJwt, + }); + + if (response.statusCode === 200) { + const route = routePermission[key]; + + // get all resource and put it over redux + const userResponse = await afterLogin( + response.payload.userId, + response.payload.accessJwt, + response.payload.refreshJwt, + ); + + if ( + userResponse && + route && + route.find((e) => e === userResponse.payload.role) === undefined + ) { + history.push(ROUTES.UN_AUTHORIZED); + } + } else { + Logout(); + + notifications.error({ + message: response.error || t('something_went_wrong'), + }); + } + } + }; + + const handlePrivateRoutes = async ( + key: keyof typeof ROUTES, + ): Promise => { + if ( + localStorageUserAuthToken && + localStorageUserAuthToken.refreshJwt && + isUserFetching + ) { + handleUserLoginIfTokenPresent(key); + } else { + // user 
does have localstorage values + + navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn); + } + }; + + const navigateToWorkSpaceBlocked = (route: any): void => { + const { path } = route; + + if (path && path !== ROUTES.WORKSPACE_LOCKED) { + history.push(ROUTES.WORKSPACE_LOCKED); + + dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + } + }; + + useEffect(() => { + if (!isFetchingLicensesData) { + const shouldBlockWorkspace = licensesData?.payload?.workSpaceBlock; + + if (shouldBlockWorkspace) { + navigateToWorkSpaceBlocked(currentRoute); + } + } + }, [isFetchingLicensesData]); + + // eslint-disable-next-line sonarjs/cognitive-complexity + useEffect(() => { + (async (): Promise => { + try { + if (isOldRoute) { + const redirectUrl = oldNewRoutesMapping[pathname]; + + const newLocation = { + ...location, + pathname: redirectUrl, + }; + history.replace(newLocation); + } + + if (currentRoute) { + const { isPrivate, key } = currentRoute; + + if (isPrivate && key !== String(ROUTES.WORKSPACE_LOCKED)) { + handlePrivateRoutes(key); + } else { + // no need to fetch the user and make user fetching false + + if (getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true') { + history.push(ROUTES.APPLICATION); + } + dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + } + } else if (pathname === ROUTES.HOME_PAGE) { + // routing to application page over root page + if (isLoggedInState) { + history.push(ROUTES.APPLICATION); + } else { + navigateToLoginIfNotLoggedIn(); + } + } else { + // not found + navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn); + } + } catch (error) { + // something went wrong + history.push(ROUTES.SOMETHING_WENT_WRONG); + } + })(); + }, [dispatch, isLoggedInState, currentRoute, licensesData]); + + if (isUserFetchingError) { + return ; + } + + if (isUserFetching) { + return ; + } + + // NOTE: disabling this rule as there is no need to have div + // eslint-disable-next-line 
react/jsx-no-useless-fragment + return <>{children}; +} + +interface PrivateRouteProps { + children: ReactChild; +} + +export default PrivateRoute; diff --git a/signoz/frontend/src/AppRoutes/index.tsx b/signoz/frontend/src/AppRoutes/index.tsx new file mode 100644 index 0000000..ac225db --- /dev/null +++ b/signoz/frontend/src/AppRoutes/index.tsx @@ -0,0 +1,258 @@ +import { ConfigProvider } from 'antd'; +import getLocalStorageApi from 'api/browser/localstorage/get'; +import setLocalStorageApi from 'api/browser/localstorage/set'; +import logEvent from 'api/common/logEvent'; +import NotFound from 'components/NotFound'; +import Spinner from 'components/Spinner'; +import { FeatureKeys } from 'constants/features'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import ROUTES from 'constants/routes'; +import AppLayout from 'container/AppLayout'; +import useAnalytics from 'hooks/analytics/useAnalytics'; +import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys'; +import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode'; +import { THEME_MODE } from 'hooks/useDarkMode/constant'; +import useGetFeatureFlag from 'hooks/useGetFeatureFlag'; +import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense'; +import { NotificationProvider } from 'hooks/useNotifications'; +import { ResourceProvider } from 'hooks/useResourceAttribute'; +import history from 'lib/history'; +import { identity, pick, pickBy } from 'lodash-es'; +import posthog from 'posthog-js'; +import { DashboardProvider } from 'providers/Dashboard/Dashboard'; +import { QueryBuilderProvider } from 'providers/QueryBuilder'; +import { Suspense, useEffect, useState } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { Route, Router, Switch } from 'react-router-dom'; +import { Dispatch } from 'redux'; +import { AppState } from 'store/reducers'; +import AppActions from 'types/actions'; +import { UPDATE_FEATURE_FLAG_RESPONSE } from 'types/actions/app'; +import 
AppReducer, { User } from 'types/reducer/app'; +import { extractDomain, isCloudUser, isEECloudUser } from 'utils/app'; + +import PrivateRoute from './Private'; +import defaultRoutes, { + AppRoutes, + LIST_LICENSES, + SUPPORT_ROUTE, +} from './routes'; + +function App(): JSX.Element { + const themeConfig = useThemeConfig(); + const { data: licenseData } = useLicense(); + const [routes, setRoutes] = useState(defaultRoutes); + const { role, isLoggedIn: isLoggedInState, user, org } = useSelector< + AppState, + AppReducer + >((state) => state.app); + + const dispatch = useDispatch>(); + + const { trackPageView } = useAnalytics(); + + const { hostname, pathname } = window.location; + + const isCloudUserVal = isCloudUser(); + + const isDarkMode = useIsDarkMode(); + + const featureResponse = useGetFeatureFlag((allFlags) => { + const isOnboardingEnabled = + allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active || + false; + + const isChatSupportEnabled = + allFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)?.active || + false; + + dispatch({ + type: UPDATE_FEATURE_FLAG_RESPONSE, + payload: { + featureFlag: allFlags, + refetch: featureResponse.refetch, + }, + }); + + if (!isOnboardingEnabled || !isCloudUserVal) { + const newRoutes = routes.filter( + (route) => route?.path !== ROUTES.GET_STARTED, + ); + + setRoutes(newRoutes); + } + + if (isLoggedInState && isChatSupportEnabled) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + window.Intercom('boot', { + app_id: process.env.INTERCOM_APP_ID, + email: user?.email || '', + name: user?.name || '', + }); + } + }); + + const isOnBasicPlan = + licenseData?.payload?.licenses?.some( + (license) => + license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, + ) || licenseData?.payload?.licenses === null; + + const enableAnalytics = (user: User): void => { + const orgName = + org && Array.isArray(org) && org.length > 0 ? 
org[0].name : ''; + + const { name, email } = user; + + const identifyPayload = { + email, + name, + company_name: orgName, + role, + source: 'signoz-ui', + }; + + const sanitizedIdentifyPayload = pickBy(identifyPayload, identity); + const domain = extractDomain(email); + const hostNameParts = hostname.split('.'); + + const groupTraits = { + name: orgName, + tenant_id: hostNameParts[0], + data_region: hostNameParts[1], + tenant_url: hostname, + company_domain: domain, + source: 'signoz-ui', + }; + + window.analytics.identify(email, sanitizedIdentifyPayload); + window.analytics.group(domain, groupTraits); + window.clarity('identify', email, name); + + posthog?.identify(email, { + email, + name, + orgName, + tenant_id: hostNameParts[0], + data_region: hostNameParts[1], + tenant_url: hostname, + company_domain: domain, + source: 'signoz-ui', + isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription, + }); + + posthog?.group('company', domain, { + name: orgName, + tenant_id: hostNameParts[0], + data_region: hostNameParts[1], + tenant_url: hostname, + company_domain: domain, + source: 'signoz-ui', + isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription, + }); + }; + + useEffect(() => { + const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER); + + if ( + isLoggedInState && + user && + user.userId && + user.email && + !isIdentifiedUser + ) { + setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true'); + } + + if ( + isOnBasicPlan || + (isLoggedInState && role && role !== 'ADMIN') || + !(isCloudUserVal || isEECloudUser()) + ) { + const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING); + setRoutes(newRoutes); + } + + if (isCloudUserVal || isEECloudUser()) { + const newRoutes = [...routes, SUPPORT_ROUTE]; + + setRoutes(newRoutes); + } else { + const newRoutes = [...routes, LIST_LICENSES]; + + setRoutes(newRoutes); + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isLoggedInState, 
isOnBasicPlan, user]); + + useEffect(() => { + trackPageView(pathname); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [pathname]); + + useEffect(() => { + if (user && user?.email && user?.userId && user?.name) { + try { + const isThemeAnalyticsSent = getLocalStorageApi( + LOCALSTORAGE.THEME_ANALYTICS_V1, + ); + if (!isThemeAnalyticsSent) { + logEvent('Theme Analytics', { + theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT, + user: pick(user, ['email', 'userId', 'name']), + org, + }); + setLocalStorageApi(LOCALSTORAGE.THEME_ANALYTICS_V1, 'true'); + } + } catch { + console.error('Failed to parse local storage theme analytics event'); + } + } + + if (isCloudUserVal && user && user.email) { + enableAnalytics(user); + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [user]); + + return ( + + + + + + + + + + }> + + {routes.map(({ path, component, exact }) => ( + + ))} + + + + + + + + + + + + + + ); +} + +export default App; diff --git a/signoz/frontend/src/AppRoutes/pageComponents.ts b/signoz/frontend/src/AppRoutes/pageComponents.ts new file mode 100644 index 0000000..9275e7d --- /dev/null +++ b/signoz/frontend/src/AppRoutes/pageComponents.ts @@ -0,0 +1,206 @@ +import Loadable from 'components/Loadable'; + +export const ServicesTablePage = Loadable( + () => import(/* webpackChunkName: "ServicesTablePage" */ 'pages/Services'), +); + +export const ServiceMetricsPage = Loadable( + () => + import( + /* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication/MetricsApplication' + ), +); + +export const ServiceTopLevelOperationsPage = Loadable( + () => + import( + /* webpackChunkName: "ServiceMetricsPage" */ 'pages/ServiceTopLevelOperations' + ), +); + +export const ServiceMapPage = Loadable( + () => import(/* webpackChunkName: "ServiceMapPage" */ 'modules/Servicemap'), +); + +export const LogsSaveViews = Loadable( + () => import(/* webpackChunkName: "LogsSaveViews" */ 'pages/LogsModulePage'), // TODO: Add a wrapper so that 
the same component can be used in traces +); + +export const TracesExplorer = Loadable( + () => + import( + /* webpackChunkName: "Traces Explorer Page" */ 'pages/TracesModulePage' + ), +); + +export const TracesSaveViews = Loadable( + () => + import(/* webpackChunkName: "Traces Save Views" */ 'pages/TracesModulePage'), +); + +export const TraceFilter = Loadable( + () => import(/* webpackChunkName: "Trace Filter Page" */ 'pages/Trace'), +); + +export const TraceDetail = Loadable( + () => import(/* webpackChunkName: "TraceDetail Page" */ 'pages/TraceDetail'), +); + +export const UsageExplorerPage = Loadable( + () => import(/* webpackChunkName: "UsageExplorerPage" */ 'modules/Usage'), +); + +export const SignupPage = Loadable( + () => import(/* webpackChunkName: "SignupPage" */ 'pages/SignUp'), +); + +export const SettingsPage = Loadable( + () => import(/* webpackChunkName: "SettingsPage" */ 'pages/Settings'), +); + +export const GettingStarted = Loadable( + () => import(/* webpackChunkName: "GettingStarted" */ 'pages/GettingStarted'), +); + +export const Onboarding = Loadable( + () => import(/* webpackChunkName: "Onboarding" */ 'pages/OnboardingPage'), +); + +export const DashboardPage = Loadable( + () => + import(/* webpackChunkName: "DashboardPage" */ 'pages/DashboardsListPage'), +); + +export const NewDashboardPage = Loadable( + () => import(/* webpackChunkName: "New DashboardPage" */ 'pages/NewDashboard'), +); + +export const DashboardWidget = Loadable( + () => + import(/* webpackChunkName: "DashboardWidgetPage" */ 'pages/DashboardWidget'), +); + +export const EditRulesPage = Loadable( + () => import(/* webpackChunkName: "Alerts Edit Page" */ 'pages/EditRules'), +); + +export const ListAllALertsPage = Loadable( + () => import(/* webpackChunkName: "All Alerts Page" */ 'pages/AlertList'), +); + +export const CreateNewAlerts = Loadable( + () => import(/* webpackChunkName: "Create Alerts" */ 'pages/CreateAlert'), +); + +export const CreateAlertChannelAlerts = 
Loadable( + () => + import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'), +); + +export const EditAlertChannelsAlerts = Loadable( + () => import(/* webpackChunkName: "Edit Channels" */ 'pages/ChannelsEdit'), +); + +export const AllAlertChannels = Loadable( + () => import(/* webpackChunkName: "All Channels" */ 'pages/Settings'), +); + +export const AllErrors = Loadable( + /* webpackChunkName: "All Exceptions" */ () => import('pages/AllErrors'), +); + +export const ErrorDetails = Loadable( + () => import(/* webpackChunkName: "Error Details" */ 'pages/ErrorDetails'), +); + +export const StatusPage = Loadable( + () => import(/* webpackChunkName: "All Status" */ 'pages/Status'), +); + +export const OrganizationSettings = Loadable( + () => import(/* webpackChunkName: "All Settings" */ 'pages/Settings'), +); + +export const IngestionSettings = Loadable( + () => import(/* webpackChunkName: "Ingestion Settings" */ 'pages/Settings'), +); + +export const APIKeys = Loadable( + () => import(/* webpackChunkName: "All Settings" */ 'pages/Settings'), +); + +export const MySettings = Loadable( + () => import(/* webpackChunkName: "All MySettings" */ 'pages/MySettings'), +); + +export const Logs = Loadable( + () => import(/* webpackChunkName: "Logs" */ 'pages/LogsModulePage'), +); + +export const LogsExplorer = Loadable( + () => import(/* webpackChunkName: "Logs Explorer" */ 'pages/LogsModulePage'), +); + +export const OldLogsExplorer = Loadable( + () => import(/* webpackChunkName: "Logs Explorer" */ 'pages/Logs'), +); + +export const LiveLogs = Loadable( + () => import(/* webpackChunkName: "Live Logs" */ 'pages/LiveLogs'), +); + +export const PipelinePage = Loadable( + () => import(/* webpackChunkName: "Pipelines" */ 'pages/LogsModulePage'), +); + +export const Login = Loadable( + () => import(/* webpackChunkName: "Login" */ 'pages/Login'), +); + +export const UnAuthorized = Loadable( + () => import(/* webpackChunkName: "UnAuthorized" */ 
'pages/UnAuthorized'), +); + +export const PasswordReset = Loadable( + () => import(/* webpackChunkName: "ResetPassword" */ 'pages/ResetPassword'), +); + +export const SomethingWentWrong = Loadable( + () => + import( + /* webpackChunkName: "SomethingWentWrong" */ 'pages/SomethingWentWrong' + ), +); + +export const LicensePage = Loadable( + () => import(/* webpackChunkName: "All Channels" */ 'pages/License'), +); + +export const LogsIndexToFields = Loadable( + () => + import(/* webpackChunkName: "LogsIndexToFields Page" */ 'pages/LogsSettings'), +); + +export const BillingPage = Loadable( + () => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'), +); + +export const SupportPage = Loadable( + () => import(/* webpackChunkName: "SupportPage" */ 'pages/Support'), +); + +export const WorkspaceBlocked = Loadable( + () => + import(/* webpackChunkName: "WorkspaceLocked" */ 'pages/WorkspaceLocked'), +); + +export const ShortcutsPage = Loadable( + () => import(/* webpackChunkName: "ShortcutsPage" */ 'pages/Shortcuts'), +); + +export const InstalledIntegrations = Loadable( + () => + import( + /* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage' + ), +); diff --git a/signoz/frontend/src/AppRoutes/routes.ts b/signoz/frontend/src/AppRoutes/routes.ts new file mode 100644 index 0000000..4fd421f --- /dev/null +++ b/signoz/frontend/src/AppRoutes/routes.ts @@ -0,0 +1,400 @@ +import ROUTES from 'constants/routes'; +import { RouteProps } from 'react-router-dom'; + +import { + AllAlertChannels, + AllErrors, + APIKeys, + BillingPage, + CreateAlertChannelAlerts, + CreateNewAlerts, + DashboardPage, + DashboardWidget, + EditAlertChannelsAlerts, + EditRulesPage, + ErrorDetails, + IngestionSettings, + InstalledIntegrations, + LicensePage, + ListAllALertsPage, + LiveLogs, + Login, + Logs, + LogsExplorer, + LogsIndexToFields, + LogsSaveViews, + MySettings, + NewDashboardPage, + OldLogsExplorer, + Onboarding, + OrganizationSettings, + PasswordReset, + 
PipelinePage, + ServiceMapPage, + ServiceMetricsPage, + ServicesTablePage, + ServiceTopLevelOperationsPage, + SettingsPage, + ShortcutsPage, + SignupPage, + SomethingWentWrong, + StatusPage, + SupportPage, + TraceDetail, + TraceFilter, + TracesExplorer, + TracesSaveViews, + UnAuthorized, + UsageExplorerPage, + WorkspaceBlocked, +} from './pageComponents'; + +const routes: AppRoutes[] = [ + { + component: SignupPage, + path: ROUTES.SIGN_UP, + exact: true, + isPrivate: false, + key: 'SIGN_UP', + }, + { + path: ROUTES.GET_STARTED, + exact: false, + component: Onboarding, + isPrivate: true, + key: 'GET_STARTED', + }, + { + component: LogsIndexToFields, + path: ROUTES.LOGS_INDEX_FIELDS, + exact: true, + isPrivate: true, + key: 'LOGS_INDEX_FIELDS', + }, + { + component: ServicesTablePage, + path: ROUTES.APPLICATION, + exact: true, + isPrivate: true, + key: 'APPLICATION', + }, + { + path: ROUTES.SERVICE_METRICS, + exact: true, + component: ServiceMetricsPage, + isPrivate: true, + key: 'SERVICE_METRICS', + }, + { + path: ROUTES.SERVICE_TOP_LEVEL_OPERATIONS, + exact: true, + component: ServiceTopLevelOperationsPage, + isPrivate: true, + key: 'SERVICE_TOP_LEVEL_OPERATIONS', + }, + { + path: ROUTES.SERVICE_MAP, + component: ServiceMapPage, + isPrivate: true, + exact: true, + key: 'SERVICE_MAP', + }, + { + path: ROUTES.LOGS_SAVE_VIEWS, + component: LogsSaveViews, + isPrivate: true, + exact: true, + key: 'LOGS_SAVE_VIEWS', + }, + { + path: ROUTES.TRACE_DETAIL, + exact: true, + component: TraceDetail, + isPrivate: true, + key: 'TRACE_DETAIL', + }, + { + path: ROUTES.SETTINGS, + exact: true, + component: SettingsPage, + isPrivate: true, + key: 'SETTINGS', + }, + { + path: ROUTES.USAGE_EXPLORER, + exact: true, + component: UsageExplorerPage, + isPrivate: true, + key: 'USAGE_EXPLORER', + }, + { + path: ROUTES.ALL_DASHBOARD, + exact: true, + component: DashboardPage, + isPrivate: true, + key: 'ALL_DASHBOARD', + }, + { + path: ROUTES.DASHBOARD, + exact: true, + component: 
NewDashboardPage, + isPrivate: true, + key: 'DASHBOARD', + }, + { + path: ROUTES.DASHBOARD_WIDGET, + exact: true, + component: DashboardWidget, + isPrivate: true, + key: 'DASHBOARD_WIDGET', + }, + { + path: ROUTES.EDIT_ALERTS, + exact: true, + component: EditRulesPage, + isPrivate: true, + key: 'EDIT_ALERTS', + }, + { + path: ROUTES.LIST_ALL_ALERT, + exact: true, + component: ListAllALertsPage, + isPrivate: true, + key: 'LIST_ALL_ALERT', + }, + { + path: ROUTES.ALERTS_NEW, + exact: true, + component: CreateNewAlerts, + isPrivate: true, + key: 'ALERTS_NEW', + }, + { + path: ROUTES.TRACE, + exact: true, + component: TraceFilter, + isPrivate: true, + key: 'TRACE', + }, + { + path: ROUTES.TRACES_EXPLORER, + exact: true, + component: TracesExplorer, + isPrivate: true, + key: 'TRACES_EXPLORER', + }, + { + path: ROUTES.TRACES_SAVE_VIEWS, + exact: true, + component: TracesSaveViews, + isPrivate: true, + key: 'TRACES_SAVE_VIEWS', + }, + { + path: ROUTES.CHANNELS_NEW, + exact: true, + component: CreateAlertChannelAlerts, + isPrivate: true, + key: 'CHANNELS_NEW', + }, + { + path: ROUTES.CHANNELS_EDIT, + exact: true, + component: EditAlertChannelsAlerts, + isPrivate: true, + key: 'CHANNELS_EDIT', + }, + { + path: ROUTES.ALL_CHANNELS, + exact: true, + component: AllAlertChannels, + isPrivate: true, + key: 'ALL_CHANNELS', + }, + { + path: ROUTES.ALL_ERROR, + exact: true, + isPrivate: true, + component: AllErrors, + key: 'ALL_ERROR', + }, + { + path: ROUTES.ERROR_DETAIL, + exact: true, + component: ErrorDetails, + isPrivate: true, + key: 'ERROR_DETAIL', + }, + { + path: ROUTES.VERSION, + exact: true, + component: StatusPage, + isPrivate: true, + key: 'VERSION', + }, + { + path: ROUTES.ORG_SETTINGS, + exact: true, + component: OrganizationSettings, + isPrivate: true, + key: 'ORG_SETTINGS', + }, + { + path: ROUTES.INGESTION_SETTINGS, + exact: true, + component: IngestionSettings, + isPrivate: true, + key: 'INGESTION_SETTINGS', + }, + { + path: ROUTES.API_KEYS, + exact: true, + 
component: APIKeys, + isPrivate: true, + key: 'API_KEYS', + }, + { + path: ROUTES.MY_SETTINGS, + exact: true, + component: MySettings, + isPrivate: true, + key: 'MY_SETTINGS', + }, + { + path: ROUTES.LOGS, + exact: true, + component: Logs, + key: 'LOGS', + isPrivate: true, + }, + { + path: ROUTES.LOGS_EXPLORER, + exact: true, + component: LogsExplorer, + key: 'LOGS_EXPLORER', + isPrivate: true, + }, + { + path: ROUTES.OLD_LOGS_EXPLORER, + exact: true, + component: OldLogsExplorer, + key: 'OLD_LOGS_EXPLORER', + isPrivate: true, + }, + { + path: ROUTES.LIVE_LOGS, + exact: true, + component: LiveLogs, + key: 'LIVE_LOGS', + isPrivate: true, + }, + { + path: ROUTES.LOGS_PIPELINES, + exact: true, + component: PipelinePage, + key: 'LOGS_PIPELINES', + isPrivate: true, + }, + { + path: ROUTES.LOGIN, + exact: true, + component: Login, + isPrivate: false, + key: 'LOGIN', + }, + { + path: ROUTES.UN_AUTHORIZED, + exact: true, + component: UnAuthorized, + key: 'UN_AUTHORIZED', + isPrivate: true, + }, + { + path: ROUTES.PASSWORD_RESET, + exact: true, + component: PasswordReset, + key: 'PASSWORD_RESET', + isPrivate: false, + }, + { + path: ROUTES.SOMETHING_WENT_WRONG, + exact: true, + component: SomethingWentWrong, + key: 'SOMETHING_WENT_WRONG', + isPrivate: false, + }, + { + path: ROUTES.BILLING, + exact: true, + component: BillingPage, + key: 'BILLING', + isPrivate: true, + }, + { + path: ROUTES.WORKSPACE_LOCKED, + exact: true, + component: WorkspaceBlocked, + isPrivate: true, + key: 'WORKSPACE_LOCKED', + }, + { + path: ROUTES.SHORTCUTS, + exact: true, + component: ShortcutsPage, + isPrivate: true, + key: 'SHORTCUTS', + }, + { + path: ROUTES.INTEGRATIONS, + exact: true, + component: InstalledIntegrations, + isPrivate: true, + key: 'INTEGRATIONS', + }, +]; + +export const SUPPORT_ROUTE: AppRoutes = { + path: ROUTES.SUPPORT, + exact: true, + component: SupportPage, + key: 'SUPPORT', + isPrivate: true, +}; + +export const LIST_LICENSES: AppRoutes = { + path: ROUTES.LIST_LICENSES, + 
exact: true, + component: LicensePage, + isPrivate: true, + key: 'LIST_LICENSES', +}; + +export const oldRoutes = [ + '/pipelines', + '/logs/old-logs-explorer', + '/logs-explorer', + '/logs-explorer/live', + '/logs-save-views', + '/traces-save-views', + '/settings/api-keys', +]; + +export const oldNewRoutesMapping: Record = { + '/pipelines': '/logs/pipelines', + '/logs/old-logs-explorer': '/logs/old-logs-explorer', + '/logs-explorer': '/logs/logs-explorer', + '/logs-explorer/live': '/logs/logs-explorer/live', + '/logs-save-views': '/logs/saved-views', + '/traces-save-views': '/traces/saved-views', + '/settings/api-keys': '/settings/access-tokens', +}; + +export interface AppRoutes { + component: RouteProps['component']; + path: RouteProps['path']; + exact: RouteProps['exact']; + isPrivate: boolean; + key: keyof typeof ROUTES; +} + +export default routes; diff --git a/signoz/frontend/src/AppRoutes/utils.ts b/signoz/frontend/src/AppRoutes/utils.ts new file mode 100644 index 0000000..68df507 --- /dev/null +++ b/signoz/frontend/src/AppRoutes/utils.ts @@ -0,0 +1,92 @@ +import getLocalStorageApi from 'api/browser/localstorage/get'; +import setLocalStorageApi from 'api/browser/localstorage/set'; +import getUserApi from 'api/user/getUser'; +import { Logout } from 'api/utils'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import store from 'store'; +import AppActions from 'types/actions'; +import { + LOGGED_IN, + UPDATE_USER, + UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN, + UPDATE_USER_IS_FETCH, +} from 'types/actions/app'; +import { SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getUser'; + +const afterLogin = async ( + userId: string, + authToken: string, + refreshToken: string, +): Promise | undefined> => { + setLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN, authToken); + setLocalStorageApi(LOCALSTORAGE.REFRESH_AUTH_TOKEN, refreshToken); + + store.dispatch({ + type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN, + payload: { + accessJwt: 
authToken, + refreshJwt: refreshToken, + }, + }); + + const [getUserResponse] = await Promise.all([ + getUserApi({ + userId, + token: authToken, + }), + ]); + + if (getUserResponse.statusCode === 200 && getUserResponse.payload) { + store.dispatch({ + type: LOGGED_IN, + payload: { + isLoggedIn: true, + }, + }); + + const { payload } = getUserResponse; + + store.dispatch({ + type: UPDATE_USER, + payload: { + ROLE: payload.role, + email: payload.email, + name: payload.name, + orgName: payload.organization, + profilePictureURL: payload.profilePictureURL, + userId: payload.id, + orgId: payload.orgId, + userFlags: payload.flags, + }, + }); + + const isLoggedInLocalStorage = getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN); + + if (isLoggedInLocalStorage === null) { + setLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN, 'true'); + } + + store.dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + + return getUserResponse; + } + + store.dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + + Logout(); + + return undefined; +}; + +export default afterLogin; diff --git a/signoz/frontend/src/ReactI18/index.tsx b/signoz/frontend/src/ReactI18/index.tsx new file mode 100644 index 0000000..a2d89e3 --- /dev/null +++ b/signoz/frontend/src/ReactI18/index.tsx @@ -0,0 +1,35 @@ +import i18n from 'i18next'; +import LanguageDetector from 'i18next-browser-languagedetector'; +import Backend from 'i18next-http-backend'; +import { initReactI18next } from 'react-i18next'; + +import cacheBursting from '../../i18n-translations-hash.json'; + +i18n + // load translation using http -> see /public/locales + .use(Backend) + // detect user language + .use(LanguageDetector) + // pass the i18n instance to react-i18next. 
+ .use(initReactI18next) + // init i18next + .init({ + debug: false, + fallbackLng: 'en', + interpolation: { + escapeValue: false, // not needed for react as it escapes by default + }, + backend: { + loadPath: (language, namespace) => { + const ns = namespace[0]; + const pathkey = `/${language}/${ns}`; + const hash = cacheBursting[pathkey as keyof typeof cacheBursting] || ''; + return `/locales/${language}/${namespace}.json?h=${hash}`; + }, + }, + react: { + useSuspense: false, + }, + }); + +export default i18n; diff --git a/signoz/frontend/src/api/APIKeys/createAPIKey.ts b/signoz/frontend/src/api/APIKeys/createAPIKey.ts new file mode 100644 index 0000000..2b219a0 --- /dev/null +++ b/signoz/frontend/src/api/APIKeys/createAPIKey.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { APIKeyProps, CreateAPIKeyProps } from 'types/api/pat/types'; + +const createAPIKey = async ( + props: CreateAPIKeyProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/pats', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default createAPIKey; diff --git a/signoz/frontend/src/api/APIKeys/deleteAPIKey.ts b/signoz/frontend/src/api/APIKeys/deleteAPIKey.ts new file mode 100644 index 0000000..03b8d59 --- /dev/null +++ b/signoz/frontend/src/api/APIKeys/deleteAPIKey.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { AllAPIKeyProps } from 'types/api/pat/types'; + +const deleteAPIKey = async ( + id: string, +): Promise | ErrorResponse> => { + try { + const 
response = await axios.delete(`/pats/${id}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteAPIKey; diff --git a/signoz/frontend/src/api/APIKeys/getAPIKey.ts b/signoz/frontend/src/api/APIKeys/getAPIKey.ts new file mode 100644 index 0000000..c0410d8 --- /dev/null +++ b/signoz/frontend/src/api/APIKeys/getAPIKey.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/get'; + +const get = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/pats/${props.id}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default get; diff --git a/signoz/frontend/src/api/APIKeys/getAllAPIKeys.ts b/signoz/frontend/src/api/APIKeys/getAllAPIKeys.ts new file mode 100644 index 0000000..488d9dc --- /dev/null +++ b/signoz/frontend/src/api/APIKeys/getAllAPIKeys.ts @@ -0,0 +1,6 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { AllAPIKeyProps } from 'types/api/pat/types'; + +export const getAllAPIKeys = (): Promise> => + axios.get(`/pats`); diff --git a/signoz/frontend/src/api/APIKeys/updateAPIKey.ts b/signoz/frontend/src/api/APIKeys/updateAPIKey.ts new file mode 100644 index 0000000..38d2022 --- /dev/null +++ b/signoz/frontend/src/api/APIKeys/updateAPIKey.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, 
UpdateAPIKeyProps } from 'types/api/pat/types'; + +const updateAPIKey = async ( + props: UpdateAPIKeyProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/pats/${props.id}`, { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateAPIKey; diff --git a/signoz/frontend/src/api/ErrorResponseHandler.ts b/signoz/frontend/src/api/ErrorResponseHandler.ts new file mode 100644 index 0000000..be2dd5e --- /dev/null +++ b/signoz/frontend/src/api/ErrorResponseHandler.ts @@ -0,0 +1,64 @@ +import { AxiosError, AxiosResponse } from 'axios'; +import { ErrorResponse } from 'types/api'; +import { ErrorStatusCode } from 'types/common'; + +export function ErrorResponseHandler(error: AxiosError): ErrorResponse { + const { response, request } = error; + if (response) { + // client received an error response (5xx, 4xx) + // making the error status code as standard Error Status Code + const statusCode = response.status as ErrorStatusCode; + + if (statusCode >= 400 && statusCode < 500) { + const { data } = response as AxiosResponse; + + if (statusCode === 404) { + return { + statusCode, + payload: null, + error: data.errorType || data.type, + message: null, + }; + } + + const { errors, error } = data; + + const errorMessage = + Array.isArray(errors) && errors.length >= 1 ? 
errors[0].msg : error; + + return { + statusCode, + payload: null, + error: errorMessage, + message: (response.data as any)?.status, + body: JSON.stringify((response.data as any).data), + }; + } + + return { + statusCode, + payload: null, + error: 'Something went wrong', + message: null, + }; + } + if (request) { + // client never received a response, or request never left + console.error('client never received a response, or request never left'); + + return { + statusCode: 500, + payload: null, + error: 'Something went wrong', + message: null, + }; + } + // anything else + console.error('any'); + return { + statusCode: 500, + payload: null, + error: String(error), + message: null, + }; +} diff --git a/signoz/frontend/src/api/IngestionKeys/createIngestionKey.ts b/signoz/frontend/src/api/IngestionKeys/createIngestionKey.ts new file mode 100644 index 0000000..77556ed --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/createIngestionKey.ts @@ -0,0 +1,29 @@ +import { GatewayApiV1Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + CreateIngestionKeyProps, + IngestionKeyProps, +} from 'types/api/ingestionKeys/types'; + +const createIngestionKey = async ( + props: CreateIngestionKeyProps, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.post('/workspaces/me/keys', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default createIngestionKey; diff --git a/signoz/frontend/src/api/IngestionKeys/deleteIngestionKey.ts b/signoz/frontend/src/api/IngestionKeys/deleteIngestionKey.ts new file mode 100644 index 0000000..5f4e7e0 --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/deleteIngestionKey.ts @@ -0,0 +1,26 
@@ +import { GatewayApiV1Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types'; + +const deleteIngestionKey = async ( + id: string, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.delete( + `/workspaces/me/keys/${id}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteIngestionKey; diff --git a/signoz/frontend/src/api/IngestionKeys/getAllIngestionKeys.ts b/signoz/frontend/src/api/IngestionKeys/getAllIngestionKeys.ts new file mode 100644 index 0000000..e202917 --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/getAllIngestionKeys.ts @@ -0,0 +1,21 @@ +import { GatewayApiV1Instance } from 'api'; +import { AxiosResponse } from 'axios'; +import { + AllIngestionKeyProps, + GetIngestionKeyProps, +} from 'types/api/ingestionKeys/types'; + +export const getAllIngestionKeys = ( + props: GetIngestionKeyProps, +): Promise> => { + // eslint-disable-next-line @typescript-eslint/naming-convention + const { search, per_page, page } = props; + + const BASE_URL = '/workspaces/me/keys'; + const URL_QUERY_PARAMS = + search && search.length > 0 + ? 
`/search?name=${search}&page=1&per_page=100` + : `?page=${page}&per_page=${per_page}`; + + return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`); +}; diff --git a/signoz/frontend/src/api/IngestionKeys/limits/createLimitsForKey.ts b/signoz/frontend/src/api/IngestionKeys/limits/createLimitsForKey.ts new file mode 100644 index 0000000..75128b9 --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/limits/createLimitsForKey.ts @@ -0,0 +1,65 @@ +/* eslint-disable @typescript-eslint/no-throw-literal */ +import { GatewayApiV1Instance } from 'api'; +import axios from 'axios'; +import { + AddLimitProps, + LimitSuccessProps, +} from 'types/api/ingestionKeys/limits/types'; + +interface SuccessResponse { + statusCode: number; + error: null; + message: string; + payload: T; +} + +interface ErrorResponse { + statusCode: number; + error: string; + message: string; + payload: null; +} + +const createLimitForIngestionKey = async ( + props: AddLimitProps, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.post( + `/workspaces/me/keys/${props.keyID}/limits`, + { + ...props, + }, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + if (axios.isAxiosError(error)) { + // Axios error + const errResponse: ErrorResponse = { + statusCode: error.response?.status || 500, + error: error.response?.data?.error, + message: error.response?.data?.status || 'An error occurred', + payload: null, + }; + + throw errResponse; + } else { + // Non-Axios error + const errResponse: ErrorResponse = { + statusCode: 500, + error: 'Unknown error', + message: 'An unknown error occurred', + payload: null, + }; + + throw errResponse; + } + } +}; + +export default createLimitForIngestionKey; diff --git a/signoz/frontend/src/api/IngestionKeys/limits/deleteLimitsForIngestionKey.ts b/signoz/frontend/src/api/IngestionKeys/limits/deleteLimitsForIngestionKey.ts new file mode 100644 
index 0000000..c0b3480 --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/limits/deleteLimitsForIngestionKey.ts @@ -0,0 +1,26 @@ +import { GatewayApiV1Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types'; + +const deleteLimitsForIngestionKey = async ( + id: string, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.delete( + `/workspaces/me/limits/${id}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteLimitsForIngestionKey; diff --git a/signoz/frontend/src/api/IngestionKeys/limits/updateLimitsForIngestionKey.ts b/signoz/frontend/src/api/IngestionKeys/limits/updateLimitsForIngestionKey.ts new file mode 100644 index 0000000..89f3031 --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/limits/updateLimitsForIngestionKey.ts @@ -0,0 +1,65 @@ +/* eslint-disable @typescript-eslint/no-throw-literal */ +import { GatewayApiV1Instance } from 'api'; +import axios from 'axios'; +import { + LimitSuccessProps, + UpdateLimitProps, +} from 'types/api/ingestionKeys/limits/types'; + +interface SuccessResponse { + statusCode: number; + error: null; + message: string; + payload: T; +} + +interface ErrorResponse { + statusCode: number; + error: string; + message: string; + payload: null; +} + +const updateLimitForIngestionKey = async ( + props: UpdateLimitProps, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.patch( + `/workspaces/me/limits/${props.limitID}`, + { + config: props.config, + }, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch 
(error) { + if (axios.isAxiosError(error)) { + // Axios error + const errResponse: ErrorResponse = { + statusCode: error.response?.status || 500, + error: error.response?.data?.error, + message: error.response?.data?.status || 'An error occurred', + payload: null, + }; + + throw errResponse; + } else { + // Non-Axios error + const errResponse: ErrorResponse = { + statusCode: 500, + error: 'Unknown error', + message: 'An unknown error occurred', + payload: null, + }; + + throw errResponse; + } + } +}; + +export default updateLimitForIngestionKey; diff --git a/signoz/frontend/src/api/IngestionKeys/updateIngestionKey.ts b/signoz/frontend/src/api/IngestionKeys/updateIngestionKey.ts new file mode 100644 index 0000000..c4777ef --- /dev/null +++ b/signoz/frontend/src/api/IngestionKeys/updateIngestionKey.ts @@ -0,0 +1,32 @@ +import { GatewayApiV1Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + IngestionKeysPayloadProps, + UpdateIngestionKeyProps, +} from 'types/api/ingestionKeys/types'; + +const updateIngestionKey = async ( + props: UpdateIngestionKeyProps, +): Promise | ErrorResponse> => { + try { + const response = await GatewayApiV1Instance.patch( + `/workspaces/me/keys/${props.id}`, + { + ...props.data, + }, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateIngestionKey; diff --git a/signoz/frontend/src/api/Integrations/getAllIntegrations.ts b/signoz/frontend/src/api/Integrations/getAllIntegrations.ts new file mode 100644 index 0000000..8aec6ef --- /dev/null +++ b/signoz/frontend/src/api/Integrations/getAllIntegrations.ts @@ -0,0 +1,7 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { AllIntegrationsProps } from 
'types/api/integrations/types'; + +export const getAllIntegrations = (): Promise< + AxiosResponse +> => axios.get(`/integrations`); diff --git a/signoz/frontend/src/api/Integrations/getIntegration.ts b/signoz/frontend/src/api/Integrations/getIntegration.ts new file mode 100644 index 0000000..84fb696 --- /dev/null +++ b/signoz/frontend/src/api/Integrations/getIntegration.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { + GetIntegrationPayloadProps, + GetIntegrationProps, +} from 'types/api/integrations/types'; + +export const getIntegration = ( + props: GetIntegrationPayloadProps, +): Promise> => + axios.get(`/integrations/${props.integrationId}`); diff --git a/signoz/frontend/src/api/Integrations/getIntegrationStatus.ts b/signoz/frontend/src/api/Integrations/getIntegrationStatus.ts new file mode 100644 index 0000000..fbfbca2 --- /dev/null +++ b/signoz/frontend/src/api/Integrations/getIntegrationStatus.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { + GetIntegrationPayloadProps, + GetIntegrationStatusProps, +} from 'types/api/integrations/types'; + +export const getIntegrationStatus = ( + props: GetIntegrationPayloadProps, +): Promise> => + axios.get(`/integrations/${props.integrationId}/connection_status`); diff --git a/signoz/frontend/src/api/Integrations/installIntegration.ts b/signoz/frontend/src/api/Integrations/installIntegration.ts new file mode 100644 index 0000000..609ec00 --- /dev/null +++ b/signoz/frontend/src/api/Integrations/installIntegration.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + InstalledIntegrationsSuccessResponse, + InstallIntegrationKeyProps, +} from 'types/api/integrations/types'; + +const installIntegration = async ( + props: InstallIntegrationKeyProps, +): Promise< + 
SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.post('/integrations/install', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default installIntegration; diff --git a/signoz/frontend/src/api/Integrations/uninstallIntegration.ts b/signoz/frontend/src/api/Integrations/uninstallIntegration.ts new file mode 100644 index 0000000..f2a9760 --- /dev/null +++ b/signoz/frontend/src/api/Integrations/uninstallIntegration.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + UninstallIntegrationProps, + UninstallIntegrationSuccessResponse, +} from 'types/api/integrations/types'; + +const unInstallIntegration = async ( + props: UninstallIntegrationProps, +): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.post('/integrations/uninstall', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default unInstallIntegration; diff --git a/signoz/frontend/src/api/SAML/deleteDomain.ts b/signoz/frontend/src/api/SAML/deleteDomain.ts new file mode 100644 index 0000000..50c2b51 --- /dev/null +++ b/signoz/frontend/src/api/SAML/deleteDomain.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/SAML/deleteDomain'; + +const deleteDomain = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const 
response = await axios.delete(`/domains/${props.id}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteDomain; diff --git a/signoz/frontend/src/api/SAML/listAllDomain.ts b/signoz/frontend/src/api/SAML/listAllDomain.ts new file mode 100644 index 0000000..41620f7 --- /dev/null +++ b/signoz/frontend/src/api/SAML/listAllDomain.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/SAML/listDomain'; + +const listAllDomain = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/orgs/${props.orgId}/domains`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default listAllDomain; diff --git a/signoz/frontend/src/api/SAML/postDomain.ts b/signoz/frontend/src/api/SAML/postDomain.ts new file mode 100644 index 0000000..34a8ecd --- /dev/null +++ b/signoz/frontend/src/api/SAML/postDomain.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/SAML/postDomain'; + +const postDomain = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/domains`, props); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default 
postDomain; diff --git a/signoz/frontend/src/api/SAML/updateDomain.ts b/signoz/frontend/src/api/SAML/updateDomain.ts new file mode 100644 index 0000000..0c4cce8 --- /dev/null +++ b/signoz/frontend/src/api/SAML/updateDomain.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/SAML/updateDomain'; + +const updateDomain = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/domains/${props.id}`, props); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateDomain; diff --git a/signoz/frontend/src/api/alerts/create.ts b/signoz/frontend/src/api/alerts/create.ts new file mode 100644 index 0000000..cad7917 --- /dev/null +++ b/signoz/frontend/src/api/alerts/create.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/create'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/rules', { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/alerts/delete.ts b/signoz/frontend/src/api/alerts/delete.ts new file mode 100644 index 0000000..278e3e2 --- /dev/null +++ b/signoz/frontend/src/api/alerts/delete.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { 
ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/delete'; + +const deleteAlerts = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.delete(`/rules/${props.id}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data.rules, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteAlerts; diff --git a/signoz/frontend/src/api/alerts/get.ts b/signoz/frontend/src/api/alerts/get.ts new file mode 100644 index 0000000..0437f8d --- /dev/null +++ b/signoz/frontend/src/api/alerts/get.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/get'; + +const get = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/rules/${props.id}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default get; diff --git a/signoz/frontend/src/api/alerts/getAll.ts b/signoz/frontend/src/api/alerts/getAll.ts new file mode 100644 index 0000000..e6b1fdb --- /dev/null +++ b/signoz/frontend/src/api/alerts/getAll.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/alerts/getAll'; + +const getAll = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await 
axios.get('/rules'); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data.rules, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getAll; diff --git a/signoz/frontend/src/api/alerts/getGroup.ts b/signoz/frontend/src/api/alerts/getGroup.ts new file mode 100644 index 0000000..f5e7247 --- /dev/null +++ b/signoz/frontend/src/api/alerts/getGroup.ts @@ -0,0 +1,29 @@ +import { AxiosAlertManagerInstance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import convertObjectIntoParams from 'lib/query/convertObjectIntoParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/getGroups'; + +const getGroups = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const queryParams = convertObjectIntoParams(props); + + const response = await AxiosAlertManagerInstance.get( + `/alerts/groups?${queryParams}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getGroups; diff --git a/signoz/frontend/src/api/alerts/getTriggered.ts b/signoz/frontend/src/api/alerts/getTriggered.ts new file mode 100644 index 0000000..6955cc3 --- /dev/null +++ b/signoz/frontend/src/api/alerts/getTriggered.ts @@ -0,0 +1,29 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import convertObjectIntoParams from 'lib/query/convertObjectIntoParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/getTriggered'; + +const getTriggered = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const queryParams = 
convertObjectIntoParams(props); + + const response = await axios.get(`/alerts?${queryParams}`); + + const amData = JSON.parse(response.data.data); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: amData.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getTriggered; diff --git a/signoz/frontend/src/api/alerts/patch.ts b/signoz/frontend/src/api/alerts/patch.ts new file mode 100644 index 0000000..920b53a --- /dev/null +++ b/signoz/frontend/src/api/alerts/patch.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/patch'; + +const patch = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.patch(`/rules/${props.id}`, { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default patch; diff --git a/signoz/frontend/src/api/alerts/put.ts b/signoz/frontend/src/api/alerts/put.ts new file mode 100644 index 0000000..b8c34e9 --- /dev/null +++ b/signoz/frontend/src/api/alerts/put.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/save'; + +const put = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/rules/${props.id}`, { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return 
ErrorResponseHandler(error as AxiosError); + } +}; + +export default put; diff --git a/signoz/frontend/src/api/alerts/save.ts b/signoz/frontend/src/api/alerts/save.ts new file mode 100644 index 0000000..229f0ae --- /dev/null +++ b/signoz/frontend/src/api/alerts/save.ts @@ -0,0 +1,17 @@ +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/save'; + +import create from './create'; +import put from './put'; + +const save = async ( + props: Props, +): Promise | ErrorResponse> => { + if (props.id && props.id > 0) { + return put({ ...props }); + } + + return create({ ...props }); +}; + +export default save; diff --git a/signoz/frontend/src/api/alerts/testAlert.ts b/signoz/frontend/src/api/alerts/testAlert.ts new file mode 100644 index 0000000..a30e977 --- /dev/null +++ b/signoz/frontend/src/api/alerts/testAlert.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/alerts/testAlert'; + +const testAlert = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testRule', { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testAlert; diff --git a/signoz/frontend/src/api/apiV1.ts b/signoz/frontend/src/api/apiV1.ts new file mode 100644 index 0000000..613ed27 --- /dev/null +++ b/signoz/frontend/src/api/apiV1.ts @@ -0,0 +1,9 @@ +const apiV1 = '/api/v1/'; + +export const apiV2 = '/api/v2/'; +export const apiV3 = '/api/v3/'; +export const apiV4 = '/api/v4/'; +export const gatewayApiV1 = '/api/gateway/v1/'; +export const apiAlertManager = '/api/alertmanager/'; + +export default apiV1; diff 
--git a/signoz/frontend/src/api/billing/checkout.ts b/signoz/frontend/src/api/billing/checkout.ts new file mode 100644 index 0000000..e6c7640 --- /dev/null +++ b/signoz/frontend/src/api/billing/checkout.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, +} from 'types/api/billing/checkout'; + +const updateCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/checkout', { + licenseKey: props.licenseKey, + successURL: props.successURL, + cancelURL: props.cancelURL, // temp + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateCreditCardApi; diff --git a/signoz/frontend/src/api/billing/getUsage.ts b/signoz/frontend/src/api/billing/getUsage.ts new file mode 100644 index 0000000..da7b6eb --- /dev/null +++ b/signoz/frontend/src/api/billing/getUsage.ts @@ -0,0 +1,36 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; + +export interface UsageResponsePayloadProps { + billingPeriodStart: Date; + billingPeriodEnd: Date; + details: { + total: number; + baseFee: number; + breakdown: []; + billTotal: number; + }; + discount: number; + subscriptionStatus?: string; +} + +const getUsage = async ( + licenseKey: string, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/billing?licenseKey=${licenseKey}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch 
(error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getUsage; diff --git a/signoz/frontend/src/api/billing/manage.ts b/signoz/frontend/src/api/billing/manage.ts new file mode 100644 index 0000000..dca561b --- /dev/null +++ b/signoz/frontend/src/api/billing/manage.ts @@ -0,0 +1,30 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, +} from 'types/api/billing/checkout'; + +const manageCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/portal', { + licenseKey: props.licenseKey, + returnURL: props.successURL, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default manageCreditCardApi; diff --git a/signoz/frontend/src/api/browser/localstorage/get.ts b/signoz/frontend/src/api/browser/localstorage/get.ts new file mode 100644 index 0000000..026c7a7 --- /dev/null +++ b/signoz/frontend/src/api/browser/localstorage/get.ts @@ -0,0 +1,9 @@ +const get = (key: string): string | null => { + try { + return localStorage.getItem(key); + } catch (e) { + return ''; + } +}; + +export default get; diff --git a/signoz/frontend/src/api/browser/localstorage/remove.ts b/signoz/frontend/src/api/browser/localstorage/remove.ts new file mode 100644 index 0000000..b1c5c40 --- /dev/null +++ b/signoz/frontend/src/api/browser/localstorage/remove.ts @@ -0,0 +1,10 @@ +const remove = (key: string): boolean => { + try { + window.localStorage.removeItem(key); + return true; + } catch (e) { + return false; + } +}; + +export default remove; diff --git a/signoz/frontend/src/api/browser/localstorage/set.ts 
b/signoz/frontend/src/api/browser/localstorage/set.ts new file mode 100644 index 0000000..1ce9f31 --- /dev/null +++ b/signoz/frontend/src/api/browser/localstorage/set.ts @@ -0,0 +1,10 @@ +const set = (key: string, value: string): boolean => { + try { + localStorage.setItem(key, value); + return true; + } catch (e) { + return false; + } +}; + +export default set; diff --git a/signoz/frontend/src/api/channels/createEmail.ts b/signoz/frontend/src/api/channels/createEmail.ts new file mode 100644 index 0000000..7d0910d --- /dev/null +++ b/signoz/frontend/src/api/channels/createEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createEmail'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/channels', { + name: props.name, + email_configs: [ + { + send_resolved: props.send_resolved, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/createMsTeams.ts b/signoz/frontend/src/api/channels/createMsTeams.ts new file mode 100644 index 0000000..ef9d309 --- /dev/null +++ b/signoz/frontend/src/api/channels/createMsTeams.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createMsTeams'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await 
axios.post('/channels', { + name: props.name, + msteams_configs: [ + { + send_resolved: props.send_resolved, + webhook_url: props.webhook_url, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/createOpsgenie.ts b/signoz/frontend/src/api/channels/createOpsgenie.ts new file mode 100644 index 0000000..4cf60f9 --- /dev/null +++ b/signoz/frontend/src/api/channels/createOpsgenie.ts @@ -0,0 +1,37 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createOpsgenie'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/channels', { + name: props.name, + opsgenie_configs: [ + { + api_key: props.api_key, + description: props.description, + priority: props.priority, + message: props.message, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/createPager.ts b/signoz/frontend/src/api/channels/createPager.ts new file mode 100644 index 0000000..682874f --- /dev/null +++ b/signoz/frontend/src/api/channels/createPager.ts @@ -0,0 +1,42 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createPager'; + +const 
create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/channels', { + name: props.name, + pagerduty_configs: [ + { + send_resolved: props.send_resolved, + routing_key: props.routing_key, + client: props.client, + client_url: props.client_url, + description: props.description, + severity: props.severity, + class: props.class, + component: props.component, + group: props.group, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/createSlack.ts b/signoz/frontend/src/api/channels/createSlack.ts new file mode 100644 index 0000000..d68bedd --- /dev/null +++ b/signoz/frontend/src/api/channels/createSlack.ts @@ -0,0 +1,35 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createSlack'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/channels', { + name: props.name, + slack_configs: [ + { + send_resolved: props.send_resolved, + api_url: props.api_url, + channel: props.channel, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/createWebhook.ts b/signoz/frontend/src/api/channels/createWebhook.ts new file mode 100644 index 0000000..67a0de7 --- /dev/null +++ b/signoz/frontend/src/api/channels/createWebhook.ts @@ -0,0 +1,51 @@ +import 
axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createWebhook'; + +const create = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + let httpConfig = {}; + + if (props.username !== '' && props.password !== '') { + httpConfig = { + basic_auth: { + username: props.username, + password: props.password, + }, + }; + } else if (props.username === '' && props.password !== '') { + httpConfig = { + authorization: { + type: 'bearer', + credentials: props.password, + }, + }; + } + + const response = await axios.post('/channels', { + name: props.name, + webhook_configs: [ + { + send_resolved: props.send_resolved, + url: props.api_url, + http_config: httpConfig, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default create; diff --git a/signoz/frontend/src/api/channels/delete.ts b/signoz/frontend/src/api/channels/delete.ts new file mode 100644 index 0000000..a5366af --- /dev/null +++ b/signoz/frontend/src/api/channels/delete.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/delete'; + +const deleteChannel = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.delete(`/channels/${props.id}`); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteChannel; diff --git 
a/signoz/frontend/src/api/channels/editEmail.ts b/signoz/frontend/src/api/channels/editEmail.ts new file mode 100644 index 0000000..b80fe68 --- /dev/null +++ b/signoz/frontend/src/api/channels/editEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editEmail'; + +const editEmail = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + email_configs: [ + { + send_resolved: props.send_resolved, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editEmail; diff --git a/signoz/frontend/src/api/channels/editMsTeams.ts b/signoz/frontend/src/api/channels/editMsTeams.ts new file mode 100644 index 0000000..293688f --- /dev/null +++ b/signoz/frontend/src/api/channels/editMsTeams.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editMsTeams'; + +const editMsTeams = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + msteams_configs: [ + { + send_resolved: props.send_resolved, + webhook_url: props.webhook_url, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as 
AxiosError); + } +}; + +export default editMsTeams; diff --git a/signoz/frontend/src/api/channels/editOpsgenie.ts b/signoz/frontend/src/api/channels/editOpsgenie.ts new file mode 100644 index 0000000..1eb65c7 --- /dev/null +++ b/signoz/frontend/src/api/channels/editOpsgenie.ts @@ -0,0 +1,38 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editOpsgenie'; + +const editOpsgenie = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + opsgenie_configs: [ + { + send_resolved: props.send_resolved, + api_key: props.api_key, + description: props.description, + priority: props.priority, + message: props.message, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editOpsgenie; diff --git a/signoz/frontend/src/api/channels/editPager.ts b/signoz/frontend/src/api/channels/editPager.ts new file mode 100644 index 0000000..091d42b --- /dev/null +++ b/signoz/frontend/src/api/channels/editPager.ts @@ -0,0 +1,42 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editPager'; + +const editPager = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + pagerduty_configs: [ + { + send_resolved: props.send_resolved, + routing_key: props.routing_key, + client: props.client, + client_url: props.client_url, 
+ description: props.description, + severity: props.severity, + class: props.class, + component: props.component, + group: props.group, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editPager; diff --git a/signoz/frontend/src/api/channels/editSlack.ts b/signoz/frontend/src/api/channels/editSlack.ts new file mode 100644 index 0000000..6396464 --- /dev/null +++ b/signoz/frontend/src/api/channels/editSlack.ts @@ -0,0 +1,35 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editSlack'; + +const editSlack = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + slack_configs: [ + { + send_resolved: props.send_resolved, + api_url: props.api_url, + channel: props.channel, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editSlack; diff --git a/signoz/frontend/src/api/channels/editWebhook.ts b/signoz/frontend/src/api/channels/editWebhook.ts new file mode 100644 index 0000000..a96850c --- /dev/null +++ b/signoz/frontend/src/api/channels/editWebhook.ts @@ -0,0 +1,50 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/editWebhook'; + +const editWebhook = async ( + props: 
Props, +): Promise | ErrorResponse> => { + try { + let httpConfig = {}; + if (props.username !== '' && props.password !== '') { + httpConfig = { + basic_auth: { + username: props.username, + password: props.password, + }, + }; + } else if (props.username === '' && props.password !== '') { + httpConfig = { + authorization: { + type: 'bearer', + credentials: props.password, + }, + }; + } + + const response = await axios.put(`/channels/${props.id}`, { + name: props.name, + webhook_configs: [ + { + send_resolved: props.send_resolved, + url: props.api_url, + http_config: httpConfig, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editWebhook; diff --git a/signoz/frontend/src/api/channels/get.ts b/signoz/frontend/src/api/channels/get.ts new file mode 100644 index 0000000..39c40ec --- /dev/null +++ b/signoz/frontend/src/api/channels/get.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/get'; + +const get = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/channels/${props.id}`); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default get; diff --git a/signoz/frontend/src/api/channels/getAll.ts b/signoz/frontend/src/api/channels/getAll.ts new file mode 100644 index 0000000..11b530a --- /dev/null +++ b/signoz/frontend/src/api/channels/getAll.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { 
ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/channels/getAll'; + +const getAll = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get('/channels'); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getAll; diff --git a/signoz/frontend/src/api/channels/testEmail.ts b/signoz/frontend/src/api/channels/testEmail.ts new file mode 100644 index 0000000..825836a --- /dev/null +++ b/signoz/frontend/src/api/channels/testEmail.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createEmail'; + +const testEmail = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + email_configs: [ + { + send_resolved: true, + to: props.to, + html: props.html, + headers: props.headers, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testEmail; diff --git a/signoz/frontend/src/api/channels/testMsTeams.ts b/signoz/frontend/src/api/channels/testMsTeams.ts new file mode 100644 index 0000000..3b4fc21 --- /dev/null +++ b/signoz/frontend/src/api/channels/testMsTeams.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createMsTeams'; + +const testMsTeams = async ( + props: 
Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + msteams_configs: [ + { + send_resolved: true, + webhook_url: props.webhook_url, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testMsTeams; diff --git a/signoz/frontend/src/api/channels/testOpsgenie.ts b/signoz/frontend/src/api/channels/testOpsgenie.ts new file mode 100644 index 0000000..780a443 --- /dev/null +++ b/signoz/frontend/src/api/channels/testOpsgenie.ts @@ -0,0 +1,37 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createOpsgenie'; + +const testOpsgenie = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + opsgenie_configs: [ + { + api_key: props.api_key, + description: props.description, + priority: props.priority, + message: props.message, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testOpsgenie; diff --git a/signoz/frontend/src/api/channels/testPager.ts b/signoz/frontend/src/api/channels/testPager.ts new file mode 100644 index 0000000..7174046 --- /dev/null +++ b/signoz/frontend/src/api/channels/testPager.ts @@ -0,0 +1,42 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { 
PayloadProps, Props } from 'types/api/channels/createPager'; + +const testPager = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + pagerduty_configs: [ + { + send_resolved: true, + routing_key: props.routing_key, + client: props.client, + client_url: props.client_url, + description: props.description, + severity: props.severity, + class: props.class, + component: props.component, + group: props.group, + details: { + ...props.detailsArray, + }, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testPager; diff --git a/signoz/frontend/src/api/channels/testSlack.ts b/signoz/frontend/src/api/channels/testSlack.ts new file mode 100644 index 0000000..a2b4b1f --- /dev/null +++ b/signoz/frontend/src/api/channels/testSlack.ts @@ -0,0 +1,35 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createSlack'; + +const testSlack = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/testChannel', { + name: props.name, + slack_configs: [ + { + send_resolved: true, + api_url: props.api_url, + channel: props.channel, + title: props.title, + text: props.text, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testSlack; diff --git a/signoz/frontend/src/api/channels/testWebhook.ts b/signoz/frontend/src/api/channels/testWebhook.ts new file mode 100644 index 0000000..4b915e9 --- /dev/null +++ 
b/signoz/frontend/src/api/channels/testWebhook.ts @@ -0,0 +1,51 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/channels/createWebhook'; + +const testWebhook = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + let httpConfig = {}; + + if (props.username !== '' && props.password !== '') { + httpConfig = { + basic_auth: { + username: props.username, + password: props.password, + }, + }; + } else if (props.username === '' && props.password !== '') { + httpConfig = { + authorization: { + type: 'bearer', + credentials: props.password, + }, + }; + } + + const response = await axios.post('/testChannel', { + name: props.name, + webhook_configs: [ + { + send_resolved: true, + url: props.api_url, + http_config: httpConfig, + }, + ], + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default testWebhook; diff --git a/signoz/frontend/src/api/common/logEvent.ts b/signoz/frontend/src/api/common/logEvent.ts new file mode 100644 index 0000000..a1bf3db --- /dev/null +++ b/signoz/frontend/src/api/common/logEvent.ts @@ -0,0 +1,29 @@ +import { ApiBaseInstance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { EventSuccessPayloadProps } from 'types/api/events/types'; + +const logEvent = async ( + eventName: string, + attributes: Record, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/event', { + eventName, + attributes, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) 
{ + console.error(error); + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default logEvent; diff --git a/signoz/frontend/src/api/dashboard/create.ts b/signoz/frontend/src/api/dashboard/create.ts new file mode 100644 index 0000000..bf5458a --- /dev/null +++ b/signoz/frontend/src/api/dashboard/create.ts @@ -0,0 +1,27 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/dashboard/create'; + +const createDashboard = async ( + props: Props, +): Promise | ErrorResponse> => { + const url = props.uploadedGrafana ? '/dashboards/grafana' : '/dashboards'; + try { + const response = await axios.post(url, { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default createDashboard; diff --git a/signoz/frontend/src/api/dashboard/delete.ts b/signoz/frontend/src/api/dashboard/delete.ts new file mode 100644 index 0000000..8faf711 --- /dev/null +++ b/signoz/frontend/src/api/dashboard/delete.ts @@ -0,0 +1,9 @@ +import axios from 'api'; +import { PayloadProps, Props } from 'types/api/dashboard/delete'; + +const deleteDashboard = (props: Props): Promise => + axios + .delete(`/dashboards/${props.uuid}`) + .then((response) => response.data); + +export default deleteDashboard; diff --git a/signoz/frontend/src/api/dashboard/get.ts b/signoz/frontend/src/api/dashboard/get.ts new file mode 100644 index 0000000..01e04c6 --- /dev/null +++ b/signoz/frontend/src/api/dashboard/get.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { ApiResponse } from 'types/api'; +import { Props } from 'types/api/dashboard/get'; +import { Dashboard } from 'types/api/dashboard/getAll'; + +const getDashboard = (props: Props): Promise => 
+ axios + .get>(`/dashboards/${props.uuid}`) + .then((res) => res.data.data); + +export default getDashboard; diff --git a/signoz/frontend/src/api/dashboard/getAll.ts b/signoz/frontend/src/api/dashboard/getAll.ts new file mode 100644 index 0000000..aafe44b --- /dev/null +++ b/signoz/frontend/src/api/dashboard/getAll.ts @@ -0,0 +1,8 @@ +import axios from 'api'; +import { ApiResponse } from 'types/api'; +import { Dashboard } from 'types/api/dashboard/getAll'; + +export const getAllDashboardList = (): Promise => + axios + .get>('/dashboards') + .then((res) => res.data.data); diff --git a/signoz/frontend/src/api/dashboard/lockDashboard.ts b/signoz/frontend/src/api/dashboard/lockDashboard.ts new file mode 100644 index 0000000..3393de8 --- /dev/null +++ b/signoz/frontend/src/api/dashboard/lockDashboard.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; + +interface LockDashboardProps { + uuid: string; +} + +const lockDashboard = (props: LockDashboardProps): Promise => + axios.put(`/dashboards/${props.uuid}/lock`); + +export default lockDashboard; diff --git a/signoz/frontend/src/api/dashboard/queryRangeFormat.ts b/signoz/frontend/src/api/dashboard/queryRangeFormat.ts new file mode 100644 index 0000000..02e020b --- /dev/null +++ b/signoz/frontend/src/api/dashboard/queryRangeFormat.ts @@ -0,0 +1,15 @@ +import { ApiV3Instance as axios } from 'api'; +import { ApiResponse } from 'types/api'; +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { QueryRangePayload } from 'types/api/metrics/getQueryRange'; + +interface IQueryRangeFormat { + compositeQuery: ICompositeMetricQuery; +} + +export const getQueryRangeFormat = ( + props?: Partial, +): Promise => + axios + .post>('/query_range/format', props) + .then((res) => res.data.data); diff --git a/signoz/frontend/src/api/dashboard/unlockDashboard.ts b/signoz/frontend/src/api/dashboard/unlockDashboard.ts new file mode 100644 index 0000000..fd4ffbe --- /dev/null +++ 
b/signoz/frontend/src/api/dashboard/unlockDashboard.ts @@ -0,0 +1,11 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; + +interface UnlockDashboardProps { + uuid: string; +} + +const unlockDashboard = (props: UnlockDashboardProps): Promise => + axios.put(`/dashboards/${props.uuid}/unlock`); + +export default unlockDashboard; diff --git a/signoz/frontend/src/api/dashboard/update.ts b/signoz/frontend/src/api/dashboard/update.ts new file mode 100644 index 0000000..21216e0 --- /dev/null +++ b/signoz/frontend/src/api/dashboard/update.ts @@ -0,0 +1,20 @@ +import axios from 'api'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/dashboard/update'; + +const updateDashboard = async ( + props: Props, +): Promise | ErrorResponse> => { + const response = await axios.put(`/dashboards/${props.uuid}`, { + ...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; +}; + +export default updateDashboard; diff --git a/signoz/frontend/src/api/dashboard/variables/dashboardVariablesQuery.ts b/signoz/frontend/src/api/dashboard/variables/dashboardVariablesQuery.ts new file mode 100644 index 0000000..8605ce7 --- /dev/null +++ b/signoz/frontend/src/api/dashboard/variables/dashboardVariablesQuery.ts @@ -0,0 +1,30 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + Props, + VariableResponseProps, +} from 'types/api/dashboard/variables/query'; + +const dashboardVariablesQuery = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/variables/query`, props); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + const formattedError = 
ErrorResponseHandler(error as AxiosError); + + // eslint-disable-next-line @typescript-eslint/no-throw-literal + throw { message: 'Error fetching data', details: formattedError }; + } +}; + +export default dashboardVariablesQuery; diff --git a/signoz/frontend/src/api/disks/getDisks.ts b/signoz/frontend/src/api/disks/getDisks.ts new file mode 100644 index 0000000..9dced1b --- /dev/null +++ b/signoz/frontend/src/api/disks/getDisks.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/disks/getDisks'; + +const getDisks = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get(`/disks`); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getDisks; diff --git a/signoz/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts b/signoz/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts new file mode 100644 index 0000000..149c113 --- /dev/null +++ b/signoz/frontend/src/api/dynamicConfigs/getDynamicConfigs.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/dynamicConfigs/getDynamicConfigs'; + +const getDynamicConfigs = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get(`/configs`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getDynamicConfigs; diff --git 
a/signoz/frontend/src/api/errors/getAll.ts b/signoz/frontend/src/api/errors/getAll.ts new file mode 100644 index 0000000..8d6793e --- /dev/null +++ b/signoz/frontend/src/api/errors/getAll.ts @@ -0,0 +1,34 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/errors/getAll'; + +const getAll = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/listErrors`, { + start: `${props.start}`, + end: `${props.end}`, + order: props.order, + orderParam: props.orderParam, + limit: props.limit, + offset: props.offset, + exceptionType: props.exceptionType, + serviceName: props.serviceName, + tags: props.tags, + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getAll; diff --git a/signoz/frontend/src/api/errors/getByErrorTypeAndService.ts b/signoz/frontend/src/api/errors/getByErrorTypeAndService.ts new file mode 100644 index 0000000..c9a710f --- /dev/null +++ b/signoz/frontend/src/api/errors/getByErrorTypeAndService.ts @@ -0,0 +1,29 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/errors/getByErrorTypeAndService'; + +const getByErrorType = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/errorFromGroupID?${createQueryParams({ + ...props, + })}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.message, + payload: response.data, + }; + } catch (error) { + return 
ErrorResponseHandler(error as AxiosError); + } +}; + +export default getByErrorType; diff --git a/signoz/frontend/src/api/errors/getById.ts b/signoz/frontend/src/api/errors/getById.ts new file mode 100644 index 0000000..ab0bae3 --- /dev/null +++ b/signoz/frontend/src/api/errors/getById.ts @@ -0,0 +1,29 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/errors/getByErrorId'; + +const getById = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/errorFromErrorID?${createQueryParams({ + ...props, + })}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.message, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getById; diff --git a/signoz/frontend/src/api/errors/getErrorCounts.ts b/signoz/frontend/src/api/errors/getErrorCounts.ts new file mode 100644 index 0000000..977eeb2 --- /dev/null +++ b/signoz/frontend/src/api/errors/getErrorCounts.ts @@ -0,0 +1,30 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/errors/getErrorCounts'; + +const getErrorCounts = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/countErrors`, { + start: `${props.start}`, + end: `${props.end}`, + exceptionType: props.exceptionType, + serviceName: props.serviceName, + tags: props.tags, + }); + + return { + statusCode: 200, + error: null, + message: response.data.message, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error 
as AxiosError); + } +}; + +export default getErrorCounts; diff --git a/signoz/frontend/src/api/errors/getNextPrevId.ts b/signoz/frontend/src/api/errors/getNextPrevId.ts new file mode 100644 index 0000000..07798c5 --- /dev/null +++ b/signoz/frontend/src/api/errors/getNextPrevId.ts @@ -0,0 +1,29 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/errors/getNextPrevId'; + +const getErrorCounts = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/nextPrevErrorIDs?${createQueryParams({ + ...props, + })}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.message, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getErrorCounts; diff --git a/signoz/frontend/src/api/features/getFeatureFlags.ts b/signoz/frontend/src/api/features/getFeatureFlags.ts new file mode 100644 index 0000000..2ce37b9 --- /dev/null +++ b/signoz/frontend/src/api/features/getFeatureFlags.ts @@ -0,0 +1,10 @@ +import axios from 'api'; +import { ApiResponse } from 'types/api'; +import { FeatureFlagProps } from 'types/api/features/getFeaturesFlags'; + +const getFeaturesFlags = (): Promise => + axios + .get>(`/featureFlags`) + .then((response) => response.data.data); + +export default getFeaturesFlags; diff --git a/signoz/frontend/src/api/index.ts b/signoz/frontend/src/api/index.ts new file mode 100644 index 0000000..7f5e2d4 --- /dev/null +++ b/signoz/frontend/src/api/index.ts @@ -0,0 +1,179 @@ +/* eslint-disable sonarjs/cognitive-complexity */ +/* eslint-disable no-param-reassign */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import getLocalStorageApi from 'api/browser/localstorage/get'; 
+import loginApi from 'api/user/login'; +import afterLogin from 'AppRoutes/utils'; +import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios'; +import { ENVIRONMENT } from 'constants/env'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import store from 'store'; + +import apiV1, { + apiAlertManager, + apiV2, + apiV3, + apiV4, + gatewayApiV1, +} from './apiV1'; +import { Logout } from './utils'; + +const interceptorsResponse = ( + value: AxiosResponse, +): Promise> => Promise.resolve(value); + +const interceptorsRequestResponse = ( + value: InternalAxiosRequestConfig, +): InternalAxiosRequestConfig => { + const token = + store.getState().app.user?.accessJwt || + getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || + ''; + + if (value && value.headers) { + value.headers.Authorization = token ? `Bearer ${token}` : ''; + } + + return value; +}; + +const interceptorRejected = async ( + value: AxiosResponse, +): Promise> => { + try { + if (axios.isAxiosError(value) && value.response) { + const { response } = value; + // reject the refresh token error + if (response.status === 401 && response.config.url !== '/login') { + const response = await loginApi({ + refreshToken: store.getState().app.user?.refreshJwt, + }); + + if (response.statusCode === 200) { + const user = await afterLogin( + response.payload.userId, + response.payload.accessJwt, + response.payload.refreshJwt, + ); + + if (user) { + const reResponse = await axios( + `${value.config.baseURL}${value.config.url?.substring(1)}`, + { + method: value.config.method, + headers: { + ...value.config.headers, + Authorization: `Bearer ${response.payload.accessJwt}`, + }, + data: { + ...JSON.parse(value.config.data || '{}'), + }, + }, + ); + + if (reResponse.status === 200) { + return await Promise.resolve(reResponse); + } + Logout(); + + return await Promise.reject(reResponse); + } + Logout(); + + return await Promise.reject(value); + } + Logout(); + } + + // when refresh token is expired + if 
(response.status === 401 && response.config.url === '/login') { + Logout(); + } + } + return await Promise.reject(value); + } catch (error) { + return await Promise.reject(error); + } +}; + +const interceptorRejectedBase = async ( + value: AxiosResponse, +): Promise> => Promise.reject(value); + +const instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV1}`, +}); + +instance.interceptors.request.use(interceptorsRequestResponse); +instance.interceptors.response.use(interceptorsResponse, interceptorRejected); + +export const AxiosAlertManagerInstance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiAlertManager}`, +}); + +export const ApiV2Instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV2}`, +}); +ApiV2Instance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); +ApiV2Instance.interceptors.request.use(interceptorsRequestResponse); + +// axios V3 +export const ApiV3Instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV3}`, +}); + +ApiV3Instance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); +ApiV3Instance.interceptors.request.use(interceptorsRequestResponse); +// + +// axios V4 +export const ApiV4Instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV4}`, +}); + +ApiV4Instance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); +ApiV4Instance.interceptors.request.use(interceptorsRequestResponse); +// + +// axios Base +export const ApiBaseInstance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${apiV1}`, +}); + +ApiBaseInstance.interceptors.response.use( + interceptorsResponse, + interceptorRejectedBase, +); +ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse); +// + +// gateway Api V1 +export const GatewayApiV1Instance = axios.create({ + baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`, +}); + +GatewayApiV1Instance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); + 
+GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse); +// + +AxiosAlertManagerInstance.interceptors.response.use( + interceptorsResponse, + interceptorRejected, +); +AxiosAlertManagerInstance.interceptors.request.use(interceptorsRequestResponse); + +export { apiV1 }; +export default instance; diff --git a/signoz/frontend/src/api/licenses/apply.ts b/signoz/frontend/src/api/licenses/apply.ts new file mode 100644 index 0000000..d0ac8528 --- /dev/null +++ b/signoz/frontend/src/api/licenses/apply.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/licenses/apply'; + +const apply = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/licenses', { + key: props.key, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default apply; diff --git a/signoz/frontend/src/api/licenses/getAll.ts b/signoz/frontend/src/api/licenses/getAll.ts new file mode 100644 index 0000000..4782be3 --- /dev/null +++ b/signoz/frontend/src/api/licenses/getAll.ts @@ -0,0 +1,24 @@ +import { ApiV2Instance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/licenses/getAll'; + +const getAll = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get('/licenses'); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } 
+}; + +export default getAll; diff --git a/signoz/frontend/src/api/logs/AddToSelectedField.ts b/signoz/frontend/src/api/logs/AddToSelectedField.ts new file mode 100644 index 0000000..b2672f7 --- /dev/null +++ b/signoz/frontend/src/api/logs/AddToSelectedField.ts @@ -0,0 +1,23 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/logs/addToSelectedFields'; + +const addToSelectedFields = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const data = await axios.post(`/logs/fields`, props); + return { + statusCode: 200, + error: null, + message: '', + payload: data.data, + }; + } catch (error) { + return Promise.reject(ErrorResponseHandler(error as AxiosError)); + } +}; + +export default addToSelectedFields; diff --git a/signoz/frontend/src/api/logs/GetLogs.ts b/signoz/frontend/src/api/logs/GetLogs.ts new file mode 100644 index 0000000..411e2e3 --- /dev/null +++ b/signoz/frontend/src/api/logs/GetLogs.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/logs/getLogs'; + +const GetLogs = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const data = await axios.get(`/logs`, { + params: props, + }); + + return { + statusCode: 200, + error: null, + message: '', + payload: data.data.results, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default GetLogs; diff --git a/signoz/frontend/src/api/logs/GetLogsAggregate.ts b/signoz/frontend/src/api/logs/GetLogsAggregate.ts new file mode 100644 index 0000000..3385999 --- /dev/null +++ b/signoz/frontend/src/api/logs/GetLogsAggregate.ts @@ -0,0 +1,26 @@ +import axios 
from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/logs/getLogsAggregate'; + +const GetLogsAggregate = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const data = await axios.get(`/logs/aggregate`, { + params: props, + }); + + return { + statusCode: 200, + error: null, + message: '', + payload: data.data.items, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default GetLogsAggregate; diff --git a/signoz/frontend/src/api/logs/GetSearchFields.ts b/signoz/frontend/src/api/logs/GetSearchFields.ts new file mode 100644 index 0000000..ce3188b --- /dev/null +++ b/signoz/frontend/src/api/logs/GetSearchFields.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/logs/getSearchFields'; + +const GetSearchFields = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const data = await axios.get(`/logs/fields`); + + return { + statusCode: 200, + error: null, + message: '', + payload: data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default GetSearchFields; diff --git a/signoz/frontend/src/api/logs/RemoveFromSelectedField.ts b/signoz/frontend/src/api/logs/RemoveFromSelectedField.ts new file mode 100644 index 0000000..f417565 --- /dev/null +++ b/signoz/frontend/src/api/logs/RemoveFromSelectedField.ts @@ -0,0 +1,23 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/logs/addToSelectedFields'; 
+ +const removeSelectedField = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const data = await axios.post(`/logs/fields`, props); + return { + statusCode: 200, + error: null, + message: '', + payload: data.data, + }; + } catch (error) { + return Promise.reject(ErrorResponseHandler(error as AxiosError)); + } +}; + +export default removeSelectedField; diff --git a/signoz/frontend/src/api/logs/livetail.ts b/signoz/frontend/src/api/logs/livetail.ts new file mode 100644 index 0000000..150f63d --- /dev/null +++ b/signoz/frontend/src/api/logs/livetail.ts @@ -0,0 +1,19 @@ +import apiV1 from 'api/apiV1'; +import getLocalStorageKey from 'api/browser/localstorage/get'; +import { ENVIRONMENT } from 'constants/env'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import { EventSourcePolyfill } from 'event-source-polyfill'; + +// 10 min in ms +const TIMEOUT_IN_MS = 10 * 60 * 1000; + +export const LiveTail = (queryParams: string): EventSourcePolyfill => + new EventSourcePolyfill( + `${ENVIRONMENT.baseURL}${apiV1}logs/tail?${queryParams}`, + { + headers: { + Authorization: `Bearer ${getLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN)}`, + }, + heartbeatTimeout: TIMEOUT_IN_MS, + }, + ); diff --git a/signoz/frontend/src/api/metrics/ApDex/apDexSettings.ts b/signoz/frontend/src/api/metrics/ApDex/apDexSettings.ts new file mode 100644 index 0000000..e3d69c9 --- /dev/null +++ b/signoz/frontend/src/api/metrics/ApDex/apDexSettings.ts @@ -0,0 +1,16 @@ +import axios from 'api'; +import { + ApDexPayloadAndSettingsProps, + SetApDexPayloadProps, +} from 'types/api/metrics/getApDex'; + +export const setApDexSettings = async ({ + servicename, + threshold, + excludeStatusCode, +}: ApDexPayloadAndSettingsProps): Promise => + axios.post('/settings/apdex', { + servicename, + threshold, + excludeStatusCode, + }); diff --git a/signoz/frontend/src/api/metrics/ApDex/getApDexSettings.ts b/signoz/frontend/src/api/metrics/ApDex/getApDexSettings.ts new file mode 100644 index 
0000000..4dcb96c --- /dev/null +++ b/signoz/frontend/src/api/metrics/ApDex/getApDexSettings.ts @@ -0,0 +1,8 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { ApDexPayloadAndSettingsProps } from 'types/api/metrics/getApDex'; + +export const getApDexSettings = ( + servicename: string, +): Promise> => + axios.get(`/settings/apdex?services=${servicename}`); diff --git a/signoz/frontend/src/api/metrics/ApDex/getMetricMeta.ts b/signoz/frontend/src/api/metrics/ApDex/getMetricMeta.ts new file mode 100644 index 0000000..90baa61 --- /dev/null +++ b/signoz/frontend/src/api/metrics/ApDex/getMetricMeta.ts @@ -0,0 +1,11 @@ +import { ApiV4Instance } from 'api'; +import { AxiosResponse } from 'axios'; +import { MetricMetaProps } from 'types/api/metrics/getApDex'; + +export const getMetricMeta = ( + metricName: string, + servicename: string, +): Promise> => + ApiV4Instance.get( + `/metric/metric_metadata?metricName=${metricName}&serviceName=${servicename}`, + ); diff --git a/signoz/frontend/src/api/metrics/getDBOverView.ts b/signoz/frontend/src/api/metrics/getDBOverView.ts new file mode 100644 index 0000000..7afd56d --- /dev/null +++ b/signoz/frontend/src/api/metrics/getDBOverView.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/metrics/getDBOverview'; + +const getDBOverView = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/service/dbOverview?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getDBOverView; diff --git 
a/signoz/frontend/src/api/metrics/getExternalAverageDuration.ts b/signoz/frontend/src/api/metrics/getExternalAverageDuration.ts new file mode 100644 index 0000000..51be375 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getExternalAverageDuration.ts @@ -0,0 +1,29 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + PayloadProps, + Props, +} from 'types/api/metrics/getExternalAverageDuration'; + +const getExternalAverageDuration = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/service/externalAvgDuration?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getExternalAverageDuration; diff --git a/signoz/frontend/src/api/metrics/getExternalError.ts b/signoz/frontend/src/api/metrics/getExternalError.ts new file mode 100644 index 0000000..3587639 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getExternalError.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/metrics/getExternalError'; + +const getExternalError = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/service/externalErrors?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + 
} +}; + +export default getExternalError; diff --git a/signoz/frontend/src/api/metrics/getExternalService.ts b/signoz/frontend/src/api/metrics/getExternalService.ts new file mode 100644 index 0000000..de9bf65 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getExternalService.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/metrics/getExternalService'; + +const getExternalService = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/service/external?&start=${props.start}&end=${props.end}&service=${props.service}&step=${props.step}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getExternalService; diff --git a/signoz/frontend/src/api/metrics/getQueryRange.ts b/signoz/frontend/src/api/metrics/getQueryRange.ts new file mode 100644 index 0000000..40deb02 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getQueryRange.ts @@ -0,0 +1,41 @@ +import { ApiV3Instance, ApiV4Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ENTITY_VERSION_V4 } from 'constants/app'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + MetricRangePayloadV3, + QueryRangePayload, +} from 'types/api/metrics/getQueryRange'; + +export const getMetricsQueryRange = async ( + props: QueryRangePayload, + version: string, + signal: AbortSignal, +): Promise | ErrorResponse> => { + try { + if (version && version === ENTITY_VERSION_V4) { + const response = await ApiV4Instance.post('/query_range', props, { signal }); + + return { + statusCode: 200, + error: null, + message: 
response.data.status, + payload: response.data, + params: props, + }; + } + + const response = await ApiV3Instance.post('/query_range', props, { signal }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + params: props, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; diff --git a/signoz/frontend/src/api/metrics/getResourceAttributes.ts b/signoz/frontend/src/api/metrics/getResourceAttributes.ts new file mode 100644 index 0000000..c482f86 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getResourceAttributes.ts @@ -0,0 +1,61 @@ +import { ApiV3Instance as axios } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + TagKeyProps, + TagKeysPayloadProps, + TagValueProps, + TagValuesPayloadProps, +} from 'types/api/metrics/getResourceAttributes'; +import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder'; + +export const getResourceAttributesTagKeys = async ( + props: TagKeyProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/autocomplete/attribute_keys?${createQueryParams({ + aggregateOperator: MetricAggregateOperator.RATE, + searchText: props.match, + dataSource: DataSource.METRICS, + aggregateAttribute: props.metricName, + })}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export const getResourceAttributesTagValues = async ( + props: TagValueProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/autocomplete/attribute_values?${createQueryParams({ + aggregateOperator: MetricAggregateOperator.RATE, + dataSource: DataSource.METRICS, + 
aggregateAttribute: props.metricName, + attributeKey: props.tagKey, + searchText: '', + })}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; diff --git a/signoz/frontend/src/api/metrics/getService.ts b/signoz/frontend/src/api/metrics/getService.ts new file mode 100644 index 0000000..731da11 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getService.ts @@ -0,0 +1,13 @@ +import axios from 'api'; +import { PayloadProps, Props } from 'types/api/metrics/getService'; + +const getService = async (props: Props): Promise => { + const response = await axios.post(`/services`, { + start: `${props.start}`, + end: `${props.end}`, + tags: props.selectedTags, + }); + return response.data; +}; + +export default getService; diff --git a/signoz/frontend/src/api/metrics/getServiceOverview.ts b/signoz/frontend/src/api/metrics/getServiceOverview.ts new file mode 100644 index 0000000..47febaa --- /dev/null +++ b/signoz/frontend/src/api/metrics/getServiceOverview.ts @@ -0,0 +1,16 @@ +import axios from 'api'; +import { PayloadProps, Props } from 'types/api/metrics/getServiceOverview'; + +const getServiceOverview = async (props: Props): Promise => { + const response = await axios.post(`/service/overview`, { + start: `${props.start}`, + end: `${props.end}`, + service: props.service, + step: props.step, + tags: props.selectedTags, + }); + + return response.data; +}; + +export default getServiceOverview; diff --git a/signoz/frontend/src/api/metrics/getTopLevelOperations.ts b/signoz/frontend/src/api/metrics/getTopLevelOperations.ts new file mode 100644 index 0000000..2f5a202 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getTopLevelOperations.ts @@ -0,0 +1,12 @@ +import axios from 'api'; + +const getTopLevelOperations = async (): Promise => { + const response = await axios.post(`/service/top_level_operations`); + return response.data; +}; 
+ +export type ServiceDataProps = { + [serviceName: string]: string[]; +}; + +export default getTopLevelOperations; diff --git a/signoz/frontend/src/api/metrics/getTopOperations.ts b/signoz/frontend/src/api/metrics/getTopOperations.ts new file mode 100644 index 0000000..9c85602 --- /dev/null +++ b/signoz/frontend/src/api/metrics/getTopOperations.ts @@ -0,0 +1,15 @@ +import axios from 'api'; +import { PayloadProps, Props } from 'types/api/metrics/getTopOperations'; + +const getTopOperations = async (props: Props): Promise => { + const response = await axios.post(`/service/top_operations`, { + start: `${props.start}`, + end: `${props.end}`, + service: props.service, + tags: props.selectedTags, + }); + + return response.data; +}; + +export default getTopOperations; diff --git a/signoz/frontend/src/api/pipeline/get.ts b/signoz/frontend/src/api/pipeline/get.ts new file mode 100644 index 0000000..ff4dd7f --- /dev/null +++ b/signoz/frontend/src/api/pipeline/get.ts @@ -0,0 +1,25 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { Pipeline } from 'types/api/pipeline/def'; +import { Props } from 'types/api/pipeline/get'; + +const get = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/logs/pipelines/${props.version}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response?.data?.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default get; diff --git a/signoz/frontend/src/api/pipeline/post.ts b/signoz/frontend/src/api/pipeline/post.ts new file mode 100644 index 0000000..c2e7ca2 --- /dev/null +++ b/signoz/frontend/src/api/pipeline/post.ts @@ -0,0 +1,25 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 
'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { Pipeline } from 'types/api/pipeline/def'; +import { Props } from 'types/api/pipeline/post'; + +const post = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/logs/pipelines', props.data); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default post; diff --git a/signoz/frontend/src/api/pipeline/preview.ts b/signoz/frontend/src/api/pipeline/preview.ts new file mode 100644 index 0000000..a349ecd --- /dev/null +++ b/signoz/frontend/src/api/pipeline/preview.ts @@ -0,0 +1,21 @@ +import axios from 'api'; +import { ILog } from 'types/api/logs/log'; +import { PipelineData } from 'types/api/pipeline/def'; + +export interface PipelineSimulationRequest { + logs: ILog[]; + pipelines: PipelineData[]; +} + +export interface PipelineSimulationResponse { + logs: ILog[]; +} + +const simulatePipelineProcessing = async ( + requestBody: PipelineSimulationRequest, +): Promise => + axios + .post('/logs/pipelines/preview', requestBody) + .then((res) => res.data.data); + +export default simulatePipelineProcessing; diff --git a/signoz/frontend/src/api/plannedDowntime/createDowntimeSchedule.ts b/signoz/frontend/src/api/plannedDowntime/createDowntimeSchedule.ts new file mode 100644 index 0000000..128fb9b --- /dev/null +++ b/signoz/frontend/src/api/plannedDowntime/createDowntimeSchedule.ts @@ -0,0 +1,44 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; + +import { Recurrence } from './getAllDowntimeSchedules'; + +export interface DowntimeSchedulePayload { + name: string; + description?: string; + alertIds: string[]; + schedule: { + timezone?: string; + startTime?: 
string; + endTime?: string; + recurrence?: Recurrence; + }; +} + +export interface PayloadProps { + status: string; + data: string; +} + +const createDowntimeSchedule = async ( + props: DowntimeSchedulePayload, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/downtime_schedules', { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default createDowntimeSchedule; diff --git a/signoz/frontend/src/api/plannedDowntime/deleteDowntimeSchedule.ts b/signoz/frontend/src/api/plannedDowntime/deleteDowntimeSchedule.ts new file mode 100644 index 0000000..81c3602 --- /dev/null +++ b/signoz/frontend/src/api/plannedDowntime/deleteDowntimeSchedule.ts @@ -0,0 +1,19 @@ +import axios from 'api'; +import { useMutation, UseMutationResult } from 'react-query'; + +export interface DeleteDowntimeScheduleProps { + id?: number; +} + +export interface DeleteSchedulePayloadProps { + status: string; + data: string; +} + +export const useDeleteDowntimeSchedule = ( + props: DeleteDowntimeScheduleProps, +): UseMutationResult => + useMutation({ + mutationKey: [props.id], + mutationFn: () => axios.delete(`/downtime_schedules/${props.id}`), + }); diff --git a/signoz/frontend/src/api/plannedDowntime/getAllDowntimeSchedules.ts b/signoz/frontend/src/api/plannedDowntime/getAllDowntimeSchedules.ts new file mode 100644 index 0000000..8e77606 --- /dev/null +++ b/signoz/frontend/src/api/plannedDowntime/getAllDowntimeSchedules.ts @@ -0,0 +1,50 @@ +import axios from 'api'; +import { AxiosError, AxiosResponse } from 'axios'; +import { Option } from 'container/PlannedDowntime/DropdownWithSubMenu/DropdownWithSubMenu'; +import { useQuery, UseQueryResult } from 'react-query'; + +export type Recurrence = { + startTime?: string | null; + endTime?: string | null; + duration?: number | string | null; + repeatType?: 
string | Option | null; + repeatOn?: string[] | null; +}; + +type Schedule = { + timezone: string | null; + startTime: string | null; + endTime: string | null; + recurrence: Recurrence | null; +}; + +export interface DowntimeSchedules { + id: number; + name: string | null; + description: string | null; + schedule: Schedule | null; + alertIds: string[] | null; + createdAt: string | null; + createdBy: string | null; + updatedAt: string | null; + updatedBy: string | null; +} +export type PayloadProps = { data: DowntimeSchedules[] }; + +export const getAllDowntimeSchedules = async ( + props?: GetAllDowntimeSchedulesPayloadProps, +): Promise> => + axios.get('/downtime_schedules', { params: props }); + +export interface GetAllDowntimeSchedulesPayloadProps { + active?: boolean; + recurrence?: boolean; +} + +export const useGetAllDowntimeSchedules = ( + props?: GetAllDowntimeSchedulesPayloadProps, +): UseQueryResult, AxiosError> => + useQuery, AxiosError>({ + queryKey: ['getAllDowntimeSchedules', props], + queryFn: () => getAllDowntimeSchedules(props), + }); diff --git a/signoz/frontend/src/api/plannedDowntime/updateDowntimeSchedule.ts b/signoz/frontend/src/api/plannedDowntime/updateDowntimeSchedule.ts new file mode 100644 index 0000000..3fc747a --- /dev/null +++ b/signoz/frontend/src/api/plannedDowntime/updateDowntimeSchedule.ts @@ -0,0 +1,37 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; + +import { DowntimeSchedulePayload } from './createDowntimeSchedule'; + +export interface DowntimeScheduleUpdatePayload { + data: DowntimeSchedulePayload; + id?: number; +} + +export interface PayloadProps { + status: string; + data: string; +} + +const updateDowntimeSchedule = async ( + props: DowntimeScheduleUpdatePayload, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/downtime_schedules/${props.id}`, { + 
...props.data, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateDowntimeSchedule; diff --git a/signoz/frontend/src/api/queryBuilder/getAggregateAttribute.ts b/signoz/frontend/src/api/queryBuilder/getAggregateAttribute.ts new file mode 100644 index 0000000..f13c3da --- /dev/null +++ b/signoz/frontend/src/api/queryBuilder/getAggregateAttribute.ts @@ -0,0 +1,49 @@ +import { ApiV3Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError, AxiosResponse } from 'axios'; +import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder'; +import { createIdFromObjectFields } from 'lib/createIdFromObjectFields'; +import createQueryParams from 'lib/createQueryParams'; +// ** Helpers +import { ErrorResponse, SuccessResponse } from 'types/api'; +// ** Types +import { IGetAggregateAttributePayload } from 'types/api/queryBuilder/getAggregatorAttribute'; +import { + BaseAutocompleteData, + IQueryAutocompleteResponse, +} from 'types/api/queryBuilder/queryAutocompleteResponse'; + +export const getAggregateAttribute = async ({ + aggregateOperator, + searchText, + dataSource, +}: IGetAggregateAttributePayload): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response: AxiosResponse<{ + data: IQueryAutocompleteResponse; + }> = await ApiV3Instance.get( + `/autocomplete/aggregate_attributes?${createQueryParams({ + aggregateOperator, + searchText, + dataSource, + })}`, + ); + + const payload: BaseAutocompleteData[] = + response.data.data.attributeKeys?.map(({ id: _, ...item }) => ({ + ...item, + id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder), + })) || []; + + return { + statusCode: 200, + error: null, + message: response.statusText, + payload: { attributeKeys: payload }, + }; + } catch (e) { + return ErrorResponseHandler(e as 
AxiosError); + } +}; diff --git a/signoz/frontend/src/api/queryBuilder/getAttributeKeys.ts b/signoz/frontend/src/api/queryBuilder/getAttributeKeys.ts new file mode 100644 index 0000000..9cc127b --- /dev/null +++ b/signoz/frontend/src/api/queryBuilder/getAttributeKeys.ts @@ -0,0 +1,51 @@ +import { ApiV3Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError, AxiosResponse } from 'axios'; +import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder'; +import { createIdFromObjectFields } from 'lib/createIdFromObjectFields'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +// ** Types +import { IGetAttributeKeysPayload } from 'types/api/queryBuilder/getAttributeKeys'; +import { + BaseAutocompleteData, + IQueryAutocompleteResponse, +} from 'types/api/queryBuilder/queryAutocompleteResponse'; + +export const getAggregateKeys = async ({ + aggregateOperator, + searchText, + dataSource, + aggregateAttribute, + tagType, +}: IGetAttributeKeysPayload): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response: AxiosResponse<{ + data: IQueryAutocompleteResponse; + }> = await ApiV3Instance.get( + `/autocomplete/attribute_keys?${createQueryParams({ + aggregateOperator, + searchText, + dataSource, + aggregateAttribute, + })}&tagType=${tagType}`, + ); + + const payload: BaseAutocompleteData[] = + response.data.data.attributeKeys?.map(({ id: _, ...item }) => ({ + ...item, + id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder), + })) || []; + + return { + statusCode: 200, + error: null, + message: response.statusText, + payload: { attributeKeys: payload }, + }; + } catch (e) { + return ErrorResponseHandler(e as AxiosError); + } +}; diff --git a/signoz/frontend/src/api/queryBuilder/getAttributesValues.ts b/signoz/frontend/src/api/queryBuilder/getAttributesValues.ts new file mode 100644 index 0000000..216da1e --- /dev/null +++ 
b/signoz/frontend/src/api/queryBuilder/getAttributesValues.ts @@ -0,0 +1,42 @@ +import { ApiV3Instance } from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import createQueryParams from 'lib/createQueryParams'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + IAttributeValuesResponse, + IGetAttributeValuesPayload, +} from 'types/api/queryBuilder/getAttributesValues'; + +export const getAttributesValues = async ({ + aggregateOperator, + dataSource, + aggregateAttribute, + attributeKey, + filterAttributeKeyDataType, + tagType, + searchText, +}: IGetAttributeValuesPayload): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await ApiV3Instance.get( + `/autocomplete/attribute_values?${createQueryParams({ + aggregateOperator, + dataSource, + aggregateAttribute, + attributeKey, + searchText, + })}&filterAttributeKeyDataType=${filterAttributeKeyDataType}&tagType=${tagType}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; diff --git a/signoz/frontend/src/api/saveView/deleteView.ts b/signoz/frontend/src/api/saveView/deleteView.ts new file mode 100644 index 0000000..9317c83 --- /dev/null +++ b/signoz/frontend/src/api/saveView/deleteView.ts @@ -0,0 +1,5 @@ +import axios from 'api'; +import { DeleteViewPayloadProps } from 'types/api/saveViews/types'; + +export const deleteView = (uuid: string): Promise => + axios.delete(`/explorer/views/${uuid}`); diff --git a/signoz/frontend/src/api/saveView/getAllViews.ts b/signoz/frontend/src/api/saveView/getAllViews.ts new file mode 100644 index 0000000..4a54d6a --- /dev/null +++ b/signoz/frontend/src/api/saveView/getAllViews.ts @@ -0,0 +1,9 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { AllViewsProps } from 'types/api/saveViews/types'; 
+import { DataSource } from 'types/common/queryBuilder'; + +export const getAllViews = ( + sourcepage: DataSource, +): Promise> => + axios.get(`/explorer/views?sourcePage=${sourcepage}`); diff --git a/signoz/frontend/src/api/saveView/saveView.ts b/signoz/frontend/src/api/saveView/saveView.ts new file mode 100644 index 0000000..60a552f --- /dev/null +++ b/signoz/frontend/src/api/saveView/saveView.ts @@ -0,0 +1,16 @@ +import axios from 'api'; +import { AxiosResponse } from 'axios'; +import { SaveViewPayloadProps, SaveViewProps } from 'types/api/saveViews/types'; + +export const saveView = ({ + compositeQuery, + sourcePage, + viewName, + extraData, +}: SaveViewProps): Promise> => + axios.post('/explorer/views', { + name: viewName, + sourcePage, + compositeQuery, + extraData, + }); diff --git a/signoz/frontend/src/api/saveView/updateView.ts b/signoz/frontend/src/api/saveView/updateView.ts new file mode 100644 index 0000000..b48b73f --- /dev/null +++ b/signoz/frontend/src/api/saveView/updateView.ts @@ -0,0 +1,19 @@ +import axios from 'api'; +import { + UpdateViewPayloadProps, + UpdateViewProps, +} from 'types/api/saveViews/types'; + +export const updateView = ({ + compositeQuery, + viewName, + extraData, + sourcePage, + viewKey, +}: UpdateViewProps): Promise => + axios.put(`/explorer/views/${viewKey}`, { + name: viewName, + compositeQuery, + extraData, + sourcePage, + }); diff --git a/signoz/frontend/src/api/settings/getIngestionData.ts b/signoz/frontend/src/api/settings/getIngestionData.ts new file mode 100644 index 0000000..c35b964 --- /dev/null +++ b/signoz/frontend/src/api/settings/getIngestionData.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { IngestionResponseProps } from 'types/api/settings/ingestion'; + +const getIngestionData = async (): Promise< + SuccessResponse | ErrorResponse +> => { + 
try { + const response = await axios.get(`/settings/ingestion_key`); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getIngestionData; diff --git a/signoz/frontend/src/api/settings/getRetention.ts b/signoz/frontend/src/api/settings/getRetention.ts new file mode 100644 index 0000000..d19ab1a --- /dev/null +++ b/signoz/frontend/src/api/settings/getRetention.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/settings/getRetention'; + +const getRetention = async ( + props: T, +): Promise> | ErrorResponse> => { + try { + const response = await axios.get>( + `/settings/ttl?type=${props}`, + ); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getRetention; diff --git a/signoz/frontend/src/api/settings/setRetention.ts b/signoz/frontend/src/api/settings/setRetention.ts new file mode 100644 index 0000000..481760b --- /dev/null +++ b/signoz/frontend/src/api/settings/setRetention.ts @@ -0,0 +1,30 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/settings/setRetention'; + +const setRetention = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post( + `/settings/ttl?duration=${props.totalDuration}&type=${props.type}${ + props.coldStorage + ? 
`&coldStorage=${props.coldStorage}&toColdDuration=${props.toColdDuration}` + : '' + }`, + ); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default setRetention; diff --git a/signoz/frontend/src/api/trace/getFilters.ts b/signoz/frontend/src/api/trace/getFilters.ts new file mode 100644 index 0000000..7eeb940 --- /dev/null +++ b/signoz/frontend/src/api/trace/getFilters.ts @@ -0,0 +1,49 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import omitBy from 'lodash-es/omitBy'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/trace/getFilters'; + +const getFilters = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const duration = + omitBy(props.other, (_, key) => !key.startsWith('duration')) || []; + + const nonDuration = omitBy(props.other, (_, key) => + key.startsWith('duration'), + ); + + const exclude: string[] = []; + + props.isFilterExclude.forEach((value, key) => { + if (value) { + exclude.push(key); + } + }); + + const response = await axios.post(`/getSpanFilters`, { + start: props.start, + end: props.end, + getFilters: props.getFilters, + ...nonDuration, + maxDuration: String((duration.duration || [])[0] || ''), + minDuration: String((duration.duration || [])[1] || ''), + exclude, + spanKind: props.spanKind, + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getFilters; diff --git a/signoz/frontend/src/api/trace/getSpans.ts b/signoz/frontend/src/api/trace/getSpans.ts new file mode 100644 index 0000000..261b265 --- /dev/null +++ b/signoz/frontend/src/api/trace/getSpans.ts @@ -0,0 +1,62 @@ +import axios from 'api'; 
+import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import omitBy from 'lodash-es/omitBy'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/trace/getSpans'; + +const getSpans = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const updatedSelectedTags = props.selectedTags.map((e) => ({ + Key: `${e.Key}.(string)`, + Operator: e.Operator, + StringValues: e.StringValues, + NumberValues: e.NumberValues, + BoolValues: e.BoolValues, + })); + + const exclude: string[] = []; + + props.isFilterExclude.forEach((value, key) => { + if (value) { + exclude.push(key); + } + }); + + const other = Object.fromEntries(props.selectedFilter); + + const duration = omitBy(other, (_, key) => !key.startsWith('duration')) || []; + + const nonDuration = omitBy(other, (_, key) => key.startsWith('duration')); + + const response = await axios.post( + `/getFilteredSpans/aggregates`, + { + start: String(props.start), + end: String(props.end), + function: props.function, + groupBy: props.groupBy === 'none' ? 
'' : props.groupBy, + step: props.step, + tags: updatedSelectedTags, + ...nonDuration, + maxDuration: String((duration.duration || [])[0] || ''), + minDuration: String((duration.duration || [])[1] || ''), + exclude, + spanKind: props.spanKind, + }, + ); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getSpans; diff --git a/signoz/frontend/src/api/trace/getSpansAggregate.ts b/signoz/frontend/src/api/trace/getSpansAggregate.ts new file mode 100644 index 0000000..7f24560 --- /dev/null +++ b/signoz/frontend/src/api/trace/getSpansAggregate.ts @@ -0,0 +1,65 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import omitBy from 'lodash-es/omitBy'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/trace/getSpanAggregate'; +import { TraceFilterEnum } from 'types/reducer/trace'; + +const getSpanAggregate = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const preProps = { + start: String(props.start), + end: String(props.end), + limit: props.limit, + offset: props.offset, + order: props.order, + orderParam: props.orderParam, + }; + + const exclude: TraceFilterEnum[] = []; + + props.isFilterExclude.forEach((value, key) => { + if (value) { + exclude.push(key); + } + }); + + const updatedSelectedTags = props.selectedTags.map((e) => ({ + Key: `${e.Key}.(string)`, + Operator: e.Operator, + StringValues: e.StringValues, + NumberValues: e.NumberValues, + BoolValues: e.BoolValues, + })); + + const other = Object.fromEntries(props.selectedFilter); + + const duration = omitBy(other, (_, key) => !key.startsWith('duration')) || []; + + const nonDuration = omitBy(other, (_, key) => key.startsWith('duration')); + + const response = await axios.post(`/getFilteredSpans`, { + 
...preProps, + tags: updatedSelectedTags, + ...nonDuration, + maxDuration: String((duration.duration || [])[0] || ''), + minDuration: String((duration.duration || [])[1] || ''), + exclude, + spanKind: props.spanKind, + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getSpanAggregate; diff --git a/signoz/frontend/src/api/trace/getTagFilter.ts b/signoz/frontend/src/api/trace/getTagFilter.ts new file mode 100644 index 0000000..2f53ab5 --- /dev/null +++ b/signoz/frontend/src/api/trace/getTagFilter.ts @@ -0,0 +1,49 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { omitBy } from 'lodash-es'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/trace/getTagFilters'; +import { TraceFilterEnum } from 'types/reducer/trace'; + +const getTagFilters = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const duration = + omitBy(props.other, (_, key) => !key.startsWith('duration')) || []; + + const exclude: TraceFilterEnum[] = []; + + props.isFilterExclude.forEach((value, key) => { + if (value) { + exclude.push(key); + } + }); + + const nonDuration = omitBy(props.other, (_, key) => + key.startsWith('duration'), + ); + + const response = await axios.post(`/getTagFilters`, { + start: String(props.start), + end: String(props.end), + ...nonDuration, + maxDuration: String((duration.duration || [])[0] || ''), + minDuration: String((duration.duration || [])[1] || ''), + exclude, + spanKind: props.spanKind, + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getTagFilters; diff --git 
a/signoz/frontend/src/api/trace/getTagValue.ts b/signoz/frontend/src/api/trace/getTagValue.ts new file mode 100644 index 0000000..2519c7a --- /dev/null +++ b/signoz/frontend/src/api/trace/getTagValue.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/trace/getTagValue'; + +const getTagValue = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/getTagValues`, { + start: props.start.toString(), + end: props.end.toString(), + tagKey: { + Key: props.tagKey.Key, + Type: props.tagKey.Type, + }, + spanKind: props.spanKind, + }); + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getTagValue; diff --git a/signoz/frontend/src/api/trace/getTraceItem.ts b/signoz/frontend/src/api/trace/getTraceItem.ts new file mode 100644 index 0000000..054c809 --- /dev/null +++ b/signoz/frontend/src/api/trace/getTraceItem.ts @@ -0,0 +1,32 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { formUrlParams } from 'container/TraceDetail/utils'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { GetTraceItemProps, PayloadProps } from 'types/api/trace/getTraceItem'; + +const getTraceItem = async ( + props: GetTraceItemProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.request({ + url: `/traces/${props.id}${formUrlParams({ + spanId: props.spanId, + levelUp: props.levelUp, + levelDown: props.levelDown, + })}`, + method: 'get', + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return 
ErrorResponseHandler(error as AxiosError); + } +}; + +export default getTraceItem; diff --git a/signoz/frontend/src/api/user/changeMyPassword.ts b/signoz/frontend/src/api/user/changeMyPassword.ts new file mode 100644 index 0000000..cdca2cd --- /dev/null +++ b/signoz/frontend/src/api/user/changeMyPassword.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/changeMyPassword'; + +const changeMyPassword = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/changePassword/${props.userId}`, { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default changeMyPassword; diff --git a/signoz/frontend/src/api/user/deleteInvite.ts b/signoz/frontend/src/api/user/deleteInvite.ts new file mode 100644 index 0000000..16233ef --- /dev/null +++ b/signoz/frontend/src/api/user/deleteInvite.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/deleteInvite'; + +const deleteInvite = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.delete(`/invite/${props.email}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteInvite; diff --git a/signoz/frontend/src/api/user/deleteUser.ts b/signoz/frontend/src/api/user/deleteUser.ts new file mode 100644 index 
0000000..4eb2694 --- /dev/null +++ b/signoz/frontend/src/api/user/deleteUser.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/deleteUser'; + +const deleteUser = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.delete(`/user/${props.userId}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default deleteUser; diff --git a/signoz/frontend/src/api/user/editOrg.ts b/signoz/frontend/src/api/user/editOrg.ts new file mode 100644 index 0000000..da980ac --- /dev/null +++ b/signoz/frontend/src/api/user/editOrg.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/editOrg'; + +const editOrg = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/org/${props.orgId}`, { + name: props.name, + isAnonymous: props.isAnonymous, + hasOptedUpdates: props.hasOptedUpdates, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editOrg; diff --git a/signoz/frontend/src/api/user/editUser.ts b/signoz/frontend/src/api/user/editUser.ts new file mode 100644 index 0000000..88f7c40 --- /dev/null +++ b/signoz/frontend/src/api/user/editUser.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 
'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/editUser'; + +const editUser = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/user/${props.userId}`, { + Name: props.name, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default editUser; diff --git a/signoz/frontend/src/api/user/getInviteDetails.ts b/signoz/frontend/src/api/user/getInviteDetails.ts new file mode 100644 index 0000000..22d49cf --- /dev/null +++ b/signoz/frontend/src/api/user/getInviteDetails.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/getInviteDetails'; + +const getInviteDetails = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/invite/${props.inviteId}?ref=${window.location.href}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getInviteDetails; diff --git a/signoz/frontend/src/api/user/getLatestVersion.ts b/signoz/frontend/src/api/user/getLatestVersion.ts new file mode 100644 index 0000000..28a72f7 --- /dev/null +++ b/signoz/frontend/src/api/user/getLatestVersion.ts @@ -0,0 +1,25 @@ +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import axios, { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getLatestVersion'; + +const getLatestVersion = async (): Promise< + SuccessResponse | 
ErrorResponse +> => { + try { + const response = await axios.get( + `https://api.github.com/repos/signoz/signoz/releases/latest`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getLatestVersion; diff --git a/signoz/frontend/src/api/user/getOrgUser.ts b/signoz/frontend/src/api/user/getOrgUser.ts new file mode 100644 index 0000000..8956adc --- /dev/null +++ b/signoz/frontend/src/api/user/getOrgUser.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/getOrgMembers'; + +const getOrgUser = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/orgUsers/${props.orgId}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getOrgUser; diff --git a/signoz/frontend/src/api/user/getOrganization.ts b/signoz/frontend/src/api/user/getOrganization.ts new file mode 100644 index 0000000..dfda5e4 --- /dev/null +++ b/signoz/frontend/src/api/user/getOrganization.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getOrganization'; + +const getOrganization = async ( + token?: string, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/org`, { + headers: { + Authorization: `bearer ${token}`, + }, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + 
payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getOrganization; diff --git a/signoz/frontend/src/api/user/getPendingInvites.ts b/signoz/frontend/src/api/user/getPendingInvites.ts new file mode 100644 index 0000000..947b7bf --- /dev/null +++ b/signoz/frontend/src/api/user/getPendingInvites.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getPendingInvites'; + +const getPendingInvites = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get(`/invite`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getPendingInvites; diff --git a/signoz/frontend/src/api/user/getPreference.ts b/signoz/frontend/src/api/user/getPreference.ts new file mode 100644 index 0000000..b284eab --- /dev/null +++ b/signoz/frontend/src/api/user/getPreference.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getUserPreference'; + +const getPreference = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get(`/userPreferences`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getPreference; diff --git a/signoz/frontend/src/api/user/getResetPasswordToken.ts 
b/signoz/frontend/src/api/user/getResetPasswordToken.ts new file mode 100644 index 0000000..845826e --- /dev/null +++ b/signoz/frontend/src/api/user/getResetPasswordToken.ts @@ -0,0 +1,24 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/getResetPasswordToken'; + +const getResetPasswordToken = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/getResetPasswordToken/${props.userId}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getResetPasswordToken; diff --git a/signoz/frontend/src/api/user/getRoles.ts b/signoz/frontend/src/api/user/getRoles.ts new file mode 100644 index 0000000..0602a0a --- /dev/null +++ b/signoz/frontend/src/api/user/getRoles.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/getUserRole'; + +const getRoles = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/rbac/role/${props.userId}`, { + headers: { + Authorization: `bearer ${props.token}`, + }, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getRoles; diff --git a/signoz/frontend/src/api/user/getUser.ts b/signoz/frontend/src/api/user/getUser.ts new file mode 100644 index 0000000..6bedb78 --- /dev/null +++ b/signoz/frontend/src/api/user/getUser.ts @@ -0,0 +1,28 @@ 
+import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/getUser'; + +const getUser = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/user/${props.userId}`, { + headers: { + Authorization: `bearer ${props.token}`, + }, + }); + + return { + statusCode: 200, + error: null, + message: 'Success', + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getUser; diff --git a/signoz/frontend/src/api/user/getVersion.ts b/signoz/frontend/src/api/user/getVersion.ts new file mode 100644 index 0000000..0f3e7f8 --- /dev/null +++ b/signoz/frontend/src/api/user/getVersion.ts @@ -0,0 +1,25 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { getVersion } from 'constants/api'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/getVersion'; + +const getVersionApi = async (): Promise< + SuccessResponse | ErrorResponse +> => { + try { + const response = await axios.get(`/${getVersion}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getVersionApi; diff --git a/signoz/frontend/src/api/user/login.ts b/signoz/frontend/src/api/user/login.ts new file mode 100644 index 0000000..4eff883 --- /dev/null +++ b/signoz/frontend/src/api/user/login.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } 
from 'types/api/user/login'; + +const login = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/login`, { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.statusText, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default login; diff --git a/signoz/frontend/src/api/user/loginPrecheck.ts b/signoz/frontend/src/api/user/loginPrecheck.ts new file mode 100644 index 0000000..c0cdc3d --- /dev/null +++ b/signoz/frontend/src/api/user/loginPrecheck.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/loginPrecheck'; + +const loginPrecheck = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/loginPrecheck?email=${encodeURIComponent( + props.email, + )}&ref=${encodeURIComponent(window.location.href)}`, + ); + + return { + statusCode: 200, + error: null, + message: response.statusText, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default loginPrecheck; diff --git a/signoz/frontend/src/api/user/resetPassword.ts b/signoz/frontend/src/api/user/resetPassword.ts new file mode 100644 index 0000000..eb6d275 --- /dev/null +++ b/signoz/frontend/src/api/user/resetPassword.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/resetPassword'; + +const resetPassword = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/resetPassword`, 
{ + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.statusText, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default resetPassword; diff --git a/signoz/frontend/src/api/user/sendInvite.ts b/signoz/frontend/src/api/user/sendInvite.ts new file mode 100644 index 0000000..9835588 --- /dev/null +++ b/signoz/frontend/src/api/user/sendInvite.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/setInvite'; + +const sendInvite = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/invite`, { + ...props, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default sendInvite; diff --git a/signoz/frontend/src/api/user/setFlags.ts b/signoz/frontend/src/api/user/setFlags.ts new file mode 100644 index 0000000..0ae9b18 --- /dev/null +++ b/signoz/frontend/src/api/user/setFlags.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/setFlags'; + +const setFlags = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.patch(`/user/${props.userId}/flags`, { + ...props.flags, + }); + + return { + statusCode: 200, + error: null, + message: response.data?.status, + payload: response.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default setFlags; diff --git 
a/signoz/frontend/src/api/user/signup.ts b/signoz/frontend/src/api/user/signup.ts new file mode 100644 index 0000000..fcb483d --- /dev/null +++ b/signoz/frontend/src/api/user/signup.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/user/loginPrecheck'; +import { Props } from 'types/api/user/signup'; + +const signup = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.post(`/register`, { + ...props, + }); + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data?.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default signup; diff --git a/signoz/frontend/src/api/user/updateRole.ts b/signoz/frontend/src/api/user/updateRole.ts new file mode 100644 index 0000000..5d82a3d --- /dev/null +++ b/signoz/frontend/src/api/user/updateRole.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/user/updateRole'; + +const updateRole = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.put(`/rbac/role/${props.userId}`, { + group_name: props.group_name, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateRole; diff --git a/signoz/frontend/src/api/userFeedback/sendFeedback.ts b/signoz/frontend/src/api/userFeedback/sendFeedback.ts new file mode 100644 index 0000000..abf8113 --- /dev/null +++ 
b/signoz/frontend/src/api/userFeedback/sendFeedback.ts @@ -0,0 +1,21 @@ +import axios from 'api'; +import { Props } from 'types/api/userFeedback/sendResponse'; + +const sendFeedback = async (props: Props): Promise => { + const response = await axios.post( + '/feedback', + { + email: props.email, + message: props.message, + }, + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + }, + ); + + return response.status; +}; + +export default sendFeedback; diff --git a/signoz/frontend/src/api/utils.ts b/signoz/frontend/src/api/utils.ts new file mode 100644 index 0000000..bd81719 --- /dev/null +++ b/signoz/frontend/src/api/utils.ts @@ -0,0 +1,76 @@ +import deleteLocalStorageKey from 'api/browser/localstorage/remove'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import ROUTES from 'constants/routes'; +import history from 'lib/history'; +import store from 'store'; +import { + LOGGED_IN, + UPDATE_ORG, + UPDATE_USER, + UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN, + UPDATE_USER_ORG_ROLE, +} from 'types/actions/app'; + +export const Logout = (): void => { + deleteLocalStorageKey(LOCALSTORAGE.AUTH_TOKEN); + deleteLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN); + deleteLocalStorageKey(LOCALSTORAGE.IS_IDENTIFIED_USER); + deleteLocalStorageKey(LOCALSTORAGE.REFRESH_AUTH_TOKEN); + deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_EMAIL); + deleteLocalStorageKey(LOCALSTORAGE.LOGGED_IN_USER_NAME); + deleteLocalStorageKey(LOCALSTORAGE.CHAT_SUPPORT); + + store.dispatch({ + type: LOGGED_IN, + payload: { + isLoggedIn: false, + }, + }); + + store.dispatch({ + type: UPDATE_USER_ORG_ROLE, + payload: { + org: null, + role: null, + }, + }); + + store.dispatch({ + type: UPDATE_USER, + payload: { + ROLE: 'VIEWER', + email: '', + name: '', + orgId: '', + orgName: '', + profilePictureURL: '', + userId: '', + userFlags: {}, + }, + }); + + store.dispatch({ + type: UPDATE_USER_ACCESS_REFRESH_ACCESS_TOKEN, + payload: { + accessJwt: '', + refreshJwt: '', + }, + }); + + 
store.dispatch({ + type: UPDATE_ORG, + payload: { + org: [], + }, + }); + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (window && window.Intercom) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + window.Intercom('shutdown'); + } + + history.push(ROUTES.LOGIN); +}; diff --git a/signoz/frontend/src/api/widgets/getQuery.ts b/signoz/frontend/src/api/widgets/getQuery.ts new file mode 100644 index 0000000..a706db3 --- /dev/null +++ b/signoz/frontend/src/api/widgets/getQuery.ts @@ -0,0 +1,26 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps, Props } from 'types/api/widgets/getQuery'; + +const getQuery = async ( + props: Props, +): Promise | ErrorResponse> => { + try { + const response = await axios.get( + `/query_range?query=${props.query}&start=${props.start}&end=${props.end}&step=${props.step}`, + ); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getQuery; diff --git a/signoz/frontend/src/assets/CustomIcons/ApacheIcon.tsx b/signoz/frontend/src/assets/CustomIcons/ApacheIcon.tsx new file mode 100644 index 0000000..42deba9 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/ApacheIcon.tsx @@ -0,0 +1,176 @@ +export default function ApacheIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/DockerIcon.tsx b/signoz/frontend/src/assets/CustomIcons/DockerIcon.tsx new file mode 100644 index 0000000..ef91bb7 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/DockerIcon.tsx @@ -0,0 +1,28 @@ +export 
default function DockerIcon(): JSX.Element { + return ( + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/ElasticSearchIcon.tsx b/signoz/frontend/src/assets/CustomIcons/ElasticSearchIcon.tsx new file mode 100644 index 0000000..d251b7d --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/ElasticSearchIcon.tsx @@ -0,0 +1,36 @@ +export default function ElasticSearchIcon(): JSX.Element { + return ( + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/GrafanaIcon.tsx b/signoz/frontend/src/assets/CustomIcons/GrafanaIcon.tsx new file mode 100644 index 0000000..c1949db --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/GrafanaIcon.tsx @@ -0,0 +1,18 @@ +function GrafanaIcon(): JSX.Element { + return ( + + + + ); +} + +export default GrafanaIcon; diff --git a/signoz/frontend/src/assets/CustomIcons/HerokuIcon.tsx b/signoz/frontend/src/assets/CustomIcons/HerokuIcon.tsx new file mode 100644 index 0000000..3d68fdb --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/HerokuIcon.tsx @@ -0,0 +1,27 @@ +function HerokuIcon(): JSX.Element { + return ( + + + + + + + + + + + ); +} + +export default HerokuIcon; diff --git a/signoz/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx b/signoz/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx new file mode 100644 index 0000000..103a48b --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/JuiceBoxIcon.tsx @@ -0,0 +1,82 @@ +function JuiceBoxIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + ); +} + +export default JuiceBoxIcon; diff --git a/signoz/frontend/src/assets/CustomIcons/KubernetesIcon.tsx b/signoz/frontend/src/assets/CustomIcons/KubernetesIcon.tsx new file mode 100644 index 0000000..280febe --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/KubernetesIcon.tsx @@ -0,0 +1,22 @@ +export default function KubernetesIcon(): JSX.Element { + return ( + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/MagicBallIcon.tsx 
b/signoz/frontend/src/assets/CustomIcons/MagicBallIcon.tsx new file mode 100644 index 0000000..e37e4bb --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/MagicBallIcon.tsx @@ -0,0 +1,38 @@ +function MagicBallIcon(): JSX.Element { + return ( + + + + + + + + + ); +} + +export default MagicBallIcon; diff --git a/signoz/frontend/src/assets/CustomIcons/MongoDBIcon.tsx b/signoz/frontend/src/assets/CustomIcons/MongoDBIcon.tsx new file mode 100644 index 0000000..d0bd3c4 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/MongoDBIcon.tsx @@ -0,0 +1,68 @@ +export default function MongoDBIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/MySQLIcon.tsx b/signoz/frontend/src/assets/CustomIcons/MySQLIcon.tsx new file mode 100644 index 0000000..1cebec3 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/MySQLIcon.tsx @@ -0,0 +1,28 @@ +export default function MySQLIcon(): JSX.Element { + return ( + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/NginxIcon.tsx b/signoz/frontend/src/assets/CustomIcons/NginxIcon.tsx new file mode 100644 index 0000000..6e93057 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/NginxIcon.tsx @@ -0,0 +1,22 @@ +function NginxIcon(): JSX.Element { + return ( + + + + + ); +} + +export default NginxIcon; diff --git a/signoz/frontend/src/assets/CustomIcons/PostgreSQLIcon.tsx b/signoz/frontend/src/assets/CustomIcons/PostgreSQLIcon.tsx new file mode 100644 index 0000000..b3c2b1e --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/PostgreSQLIcon.tsx @@ -0,0 +1,40 @@ +export default function PostgreSQLIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/RedisIcon.tsx b/signoz/frontend/src/assets/CustomIcons/RedisIcon.tsx new file mode 100644 index 0000000..714eeb3 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/RedisIcon.tsx 
@@ -0,0 +1,60 @@ +export default function RedisIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + ); +} diff --git a/signoz/frontend/src/assets/CustomIcons/TentIcon.tsx b/signoz/frontend/src/assets/CustomIcons/TentIcon.tsx new file mode 100644 index 0000000..9271324 --- /dev/null +++ b/signoz/frontend/src/assets/CustomIcons/TentIcon.tsx @@ -0,0 +1,110 @@ +function TentIcon(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +export default TentIcon; diff --git a/signoz/frontend/src/assets/Dashboard/BarIcon.tsx b/signoz/frontend/src/assets/Dashboard/BarIcon.tsx new file mode 100644 index 0000000..b8e6b3c --- /dev/null +++ b/signoz/frontend/src/assets/Dashboard/BarIcon.tsx @@ -0,0 +1,41 @@ +import { CSSProperties } from 'react'; + +function BarIcon({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + + + ); +} + +export default BarIcon; diff --git a/signoz/frontend/src/assets/Dashboard/List.tsx b/signoz/frontend/src/assets/Dashboard/List.tsx new file mode 100644 index 0000000..1c4d1d0 --- /dev/null +++ b/signoz/frontend/src/assets/Dashboard/List.tsx @@ -0,0 +1,30 @@ +import { CSSProperties } from 'react'; + +function ListIcon({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + + + + + + ); +} + +export default ListIcon; diff --git a/signoz/frontend/src/assets/Dashboard/PromQl.tsx b/signoz/frontend/src/assets/Dashboard/PromQl.tsx new file mode 100644 index 0000000..8a942ae --- /dev/null +++ b/signoz/frontend/src/assets/Dashboard/PromQl.tsx @@ -0,0 +1,27 @@ +import { CSSProperties } from 'react'; + +function PromQLIcon({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + ); +} + +export default PromQLIcon; diff --git a/signoz/frontend/src/assets/Dashboard/Table.tsx b/signoz/frontend/src/assets/Dashboard/Table.tsx new file mode 100644 index 0000000..60effdb --- /dev/null +++ 
b/signoz/frontend/src/assets/Dashboard/Table.tsx @@ -0,0 +1,48 @@ +import { CSSProperties } from 'react'; + +function TableIcon({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + + + + ); +} + +export default TableIcon; diff --git a/signoz/frontend/src/assets/Dashboard/TimeSeries.tsx b/signoz/frontend/src/assets/Dashboard/TimeSeries.tsx new file mode 100644 index 0000000..afa9b5f --- /dev/null +++ b/signoz/frontend/src/assets/Dashboard/TimeSeries.tsx @@ -0,0 +1,69 @@ +import { CSSProperties } from 'react'; + +function TimeSeries({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + + + + + + + ); +} + +export default TimeSeries; diff --git a/signoz/frontend/src/assets/Dashboard/Value.tsx b/signoz/frontend/src/assets/Dashboard/Value.tsx new file mode 100644 index 0000000..39ef8d9 --- /dev/null +++ b/signoz/frontend/src/assets/Dashboard/Value.tsx @@ -0,0 +1,32 @@ +import { CSSProperties } from 'react'; + +function Value({ + fillColor, +}: { + fillColor: CSSProperties['color']; +}): JSX.Element { + return ( + + + + + ); +} + +export default Value; diff --git a/signoz/frontend/src/assets/Integrations/ConfigureIcon.tsx b/signoz/frontend/src/assets/Integrations/ConfigureIcon.tsx new file mode 100644 index 0000000..84ddef5 --- /dev/null +++ b/signoz/frontend/src/assets/Integrations/ConfigureIcon.tsx @@ -0,0 +1,23 @@ +import { Color } from '@signozhq/design-tokens'; +import { useIsDarkMode } from 'hooks/useDarkMode'; + +function ConfigureIcon(): JSX.Element { + const isDarkMode = useIsDarkMode(); + return ( + + + + + + + ); +} + +export default ConfigureIcon; diff --git a/signoz/frontend/src/assets/NotFound.tsx b/signoz/frontend/src/assets/NotFound.tsx new file mode 100644 index 0000000..b8bf4d0 --- /dev/null +++ b/signoz/frontend/src/assets/NotFound.tsx @@ -0,0 +1,14 @@ +function NotFound(): JSX.Element { + return ( + not-found + ); +} + +export default NotFound; diff --git 
a/signoz/frontend/src/assets/SomethingWentWrong.tsx b/signoz/frontend/src/assets/SomethingWentWrong.tsx new file mode 100644 index 0000000..e6b0d30 --- /dev/null +++ b/signoz/frontend/src/assets/SomethingWentWrong.tsx @@ -0,0 +1,468 @@ +function SomethingWentWrong(): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +export default SomethingWentWrong; diff --git a/signoz/frontend/src/assets/UnAuthorized.tsx b/signoz/frontend/src/assets/UnAuthorized.tsx new file mode 100644 index 0000000..93cfb48 --- /dev/null +++ b/signoz/frontend/src/assets/UnAuthorized.tsx @@ -0,0 +1,26 @@ +function UnAuthorized(): JSX.Element { + return ( + + + + + + ); +} + +export default UnAuthorized; diff --git a/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss b/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss new file mode 100644 index 0000000..14f80a9 --- /dev/null +++ b/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.styles.scss @@ -0,0 +1,121 @@ +.custom-time-picker { + display: flex; + flex-direction: column; +} + +.time-options-container { + .time-options-item { + margin: 2px 0; + padding: 8px; + border-radius: 2px; + + &.active { + background-color: rgba($color: #000000, $alpha: 0.2); + + &:hover { + cursor: pointer; + background-color: rgba($color: #000000, $alpha: 0.3); + } + } + + &:hover { + cursor: pointer; + background-color: rgba($color: #000000, $alpha: 0.3); + } + } +} + +.time-selection-dropdown-content { + min-width: 172px; + width: 100%; +} + +.timeSelection-input { + display: flex; + gap: 8px; + height: 33px; + align-items: center; + padding: 4px 8px; + padding-left: 0px !important; + + &.custom-time { + input:not(:focus) { + min-width: 240px; + } + } + + input::placeholder { + color: white; + } + 
+ input:focus::placeholder { + color: rgba($color: #ffffff, $alpha: 0.4); + } +} + +.valid-format-error { + margin-top: 4px; + color: var(--bg-cherry-400) !important; + font-size: 13px !important; + font-weight: 400 !important; +} + +.info-text { + display: flex; + align-items: center; + justify-content: center; + padding: 4px; + cursor: default; + color: var(--bg-vanilla-400, #c0c1c3) !important; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; +} + +.info-text:hover { + &.ant-btn-text { + background-color: unset !important; + } +} + +.lightMode { + .time-options-container { + .time-options-item { + &.active { + background-color: rgba($color: #ffffff, $alpha: 0.2); + + &:hover { + cursor: pointer; + background-color: rgba($color: #ffffff, $alpha: 0.3); + } + } + + &:hover { + cursor: pointer; + background-color: rgba($color: #ffffff, $alpha: 0.3); + } + } + } + + .timeSelection-input { + display: flex; + gap: 8px; + align-items: center; + padding: 4px 8px; + padding-left: 0px !important; + + input::placeholder { + color: var(---bg-ink-300); + } + + input:focus::placeholder { + color: rgba($color: #000000, $alpha: 0.4); + } + } + + .info-text { + color: var(--bg-slate-400) !important; + } +} diff --git a/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx b/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx new file mode 100644 index 0000000..a3bb980 --- /dev/null +++ b/signoz/frontend/src/components/CustomTimePicker/CustomTimePicker.tsx @@ -0,0 +1,348 @@ +/* eslint-disable jsx-a11y/click-events-have-key-events */ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +import './CustomTimePicker.styles.scss'; + +import { Input, Popover, Tooltip, Typography } from 'antd'; +import cx from 'classnames'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; +import { + FixedDurationSuggestionOptions, + Options, + RelativeDurationSuggestionOptions, 
+} from 'container/TopNav/DateTimeSelectionV2/config'; +import dayjs from 'dayjs'; +import { isValidTimeFormat } from 'lib/getMinMax'; +import { defaultTo, isFunction, noop } from 'lodash-es'; +import debounce from 'lodash-es/debounce'; +import { CheckCircle, ChevronDown, Clock } from 'lucide-react'; +import { + ChangeEvent, + Dispatch, + SetStateAction, + useEffect, + useState, +} from 'react'; +import { useLocation } from 'react-router-dom'; +import { popupContainer } from 'utils/selectPopupContainer'; + +import CustomTimePickerPopoverContent from './CustomTimePickerPopoverContent'; + +const maxAllowedMinTimeInMonths = 6; + +interface CustomTimePickerProps { + onSelect: (value: string) => void; + onError: (value: boolean) => void; + selectedValue: string; + selectedTime: string; + onValidCustomDateChange: ({ + time: [t1, t2], + timeStr, + }: { + time: [dayjs.Dayjs | null, dayjs.Dayjs | null]; + timeStr: string; + }) => void; + onCustomTimeStatusUpdate?: (isValid: boolean) => void; + open: boolean; + setOpen: Dispatch>; + items: any[]; + newPopover?: boolean; + customDateTimeVisible?: boolean; + setCustomDTPickerVisible?: Dispatch>; + onCustomDateHandler?: (dateTimeRange: DateTimeRangeType) => void; + handleGoLive?: () => void; +} + +function CustomTimePicker({ + onSelect, + onError, + items, + selectedValue, + selectedTime, + open, + setOpen, + onValidCustomDateChange, + onCustomTimeStatusUpdate, + newPopover, + customDateTimeVisible, + setCustomDTPickerVisible, + onCustomDateHandler, + handleGoLive, +}: CustomTimePickerProps): JSX.Element { + const [ + selectedTimePlaceholderValue, + setSelectedTimePlaceholderValue, + ] = useState('Select / Enter Time Range'); + + const [inputValue, setInputValue] = useState(''); + const [inputStatus, setInputStatus] = useState<'' | 'error' | 'success'>(''); + const [inputErrorMessage, setInputErrorMessage] = useState( + null, + ); + const location = useLocation(); + const [isInputFocused, setIsInputFocused] = useState(false); + 
+ const getSelectedTimeRangeLabel = ( + selectedTime: string, + selectedTimeValue: string, + ): string => { + if (selectedTime === 'custom') { + return selectedTimeValue; + } + + for (let index = 0; index < Options.length; index++) { + if (Options[index].value === selectedTime) { + return Options[index].label; + } + } + + for ( + let index = 0; + index < RelativeDurationSuggestionOptions.length; + index++ + ) { + if (RelativeDurationSuggestionOptions[index].value === selectedTime) { + return RelativeDurationSuggestionOptions[index].label; + } + } + + for (let index = 0; index < FixedDurationSuggestionOptions.length; index++) { + if (FixedDurationSuggestionOptions[index].value === selectedTime) { + return FixedDurationSuggestionOptions[index].label; + } + } + + if (isValidTimeFormat(selectedTime)) { + return selectedTime; + } + + return ''; + }; + + useEffect(() => { + const value = getSelectedTimeRangeLabel(selectedTime, selectedValue); + + setSelectedTimePlaceholderValue(value); + }, [selectedTime, selectedValue]); + + const hide = (): void => { + setOpen(false); + }; + + const handleOpenChange = (newOpen: boolean): void => { + setOpen(newOpen); + if (!newOpen) { + setCustomDTPickerVisible?.(false); + } + }; + + const debouncedHandleInputChange = debounce((inputValue): void => { + const isValidFormat = /^(\d+)([mhdw])$/.test(inputValue); + if (isValidFormat) { + setInputStatus('success'); + onError(false); + setInputErrorMessage(null); + + const match = inputValue.match(/^(\d+)([mhdw])$/); + + const value = parseInt(match[1], 10); + const unit = match[2]; + + const currentTime = dayjs(); + const maxAllowedMinTime = currentTime.subtract( + maxAllowedMinTimeInMonths, + 'month', + ); + let minTime = null; + + switch (unit) { + case 'm': + minTime = currentTime.subtract(value, 'minute'); + break; + + case 'h': + minTime = currentTime.subtract(value, 'hour'); + break; + case 'd': + minTime = currentTime.subtract(value, 'day'); + break; + case 'w': + minTime = 
currentTime.subtract(value, 'week'); + break; + default: + break; + } + + if (minTime && (!minTime.isValid() || minTime < maxAllowedMinTime)) { + setInputStatus('error'); + onError(true); + setInputErrorMessage('Please enter time less than 6 months'); + if (isFunction(onCustomTimeStatusUpdate)) { + onCustomTimeStatusUpdate(true); + } + } else { + onValidCustomDateChange({ + time: [minTime, currentTime], + timeStr: inputValue, + }); + } + } else { + setInputStatus('error'); + onError(true); + setInputErrorMessage(null); + if (isFunction(onCustomTimeStatusUpdate)) { + onCustomTimeStatusUpdate(false); + } + } + }, 300); + + const handleInputChange = (event: ChangeEvent): void => { + const inputValue = event.target.value; + + if (inputValue.length > 0) { + setOpen(false); + } else { + setOpen(true); + } + + setInputValue(inputValue); + + // Call the debounced function with the input value + debouncedHandleInputChange(inputValue); + }; + + const handleSelect = (label: string, value: string): void => { + onSelect(value); + setSelectedTimePlaceholderValue(label); + setInputStatus(''); + onError(false); + setInputErrorMessage(null); + setInputValue(''); + if (value !== 'custom') { + hide(); + } + }; + + const content = ( +
+
+ {items?.map(({ value, label }) => ( +
{ + handleSelect(label, value); + }} + key={value} + className={cx( + 'time-options-item', + selectedValue === value ? 'active' : '', + )} + > + {label} +
+ ))} +
+
+ ); + + const handleFocus = (): void => { + setIsInputFocused(true); + }; + + const handleBlur = (): void => { + setIsInputFocused(false); + }; + + // this is required as TopNav component wraps the components and we need to clear the state on path change + useEffect(() => { + setInputStatus(''); + onError(false); + setInputErrorMessage(null); + setInputValue(''); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.pathname]); + + return ( +
+ + ) : ( + content + ) + } + arrow={false} + trigger="click" + open={open} + onOpenChange={handleOpenChange} + style={{ + padding: 0, + }} + > + + ) : ( + + + + ) + } + suffix={ + { + setOpen(!open); + }} + /> + } + /> + + + {inputStatus === 'error' && inputErrorMessage && ( + + {inputErrorMessage} + + )} +
+ ); +} + +export default CustomTimePicker; + +CustomTimePicker.defaultProps = { + newPopover: false, + customDateTimeVisible: false, + setCustomDTPickerVisible: noop, + onCustomDateHandler: noop, + handleGoLive: noop, + onCustomTimeStatusUpdate: noop, +}; diff --git a/signoz/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx b/signoz/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx new file mode 100644 index 0000000..4a41bec --- /dev/null +++ b/signoz/frontend/src/components/CustomTimePicker/CustomTimePickerPopoverContent.tsx @@ -0,0 +1,118 @@ +import './CustomTimePicker.styles.scss'; + +import { Button } from 'antd'; +import cx from 'classnames'; +import ROUTES from 'constants/routes'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; +import { + LexicalContext, + Option, + RelativeDurationSuggestionOptions, +} from 'container/TopNav/DateTimeSelectionV2/config'; +import { Dispatch, SetStateAction, useMemo } from 'react'; +import { useLocation } from 'react-router-dom'; + +import RangePickerModal from './RangePickerModal'; + +interface CustomTimePickerPopoverContentProps { + options: any[]; + setIsOpen: Dispatch>; + customDateTimeVisible: boolean; + setCustomDTPickerVisible: Dispatch>; + onCustomDateHandler: ( + dateTimeRange: DateTimeRangeType, + lexicalContext?: LexicalContext, + ) => void; + onSelectHandler: (label: string, value: string) => void; + handleGoLive: () => void; + selectedTime: string; +} + +function CustomTimePickerPopoverContent({ + options, + setIsOpen, + customDateTimeVisible, + setCustomDTPickerVisible, + onCustomDateHandler, + onSelectHandler, + handleGoLive, + selectedTime, +}: CustomTimePickerPopoverContentProps): JSX.Element { + const { pathname } = useLocation(); + + const isLogsExplorerPage = useMemo(() => pathname === ROUTES.LOGS_EXPLORER, [ + pathname, + ]); + + function getTimeChips(options: Option[]): JSX.Element { + return ( +
+ {options.map((option) => ( + + ))} +
+ ); + } + + return ( +
+
+ {isLogsExplorerPage && ( + + )} + {options.map((option) => ( + + ))} +
+
+ {selectedTime === 'custom' || customDateTimeVisible ? ( + + ) : ( +
+
RELATIVE TIMES
+
{getTimeChips(RelativeDurationSuggestionOptions)}
+
+ )} +
+
+ ); +} + +export default CustomTimePickerPopoverContent; diff --git a/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss b/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss new file mode 100644 index 0000000..58ebe06 --- /dev/null +++ b/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.styles.scss @@ -0,0 +1,4 @@ +.custom-date-picker { + display: flex; + flex-direction: column; +} diff --git a/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.tsx b/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.tsx new file mode 100644 index 0000000..24ba0e2 --- /dev/null +++ b/signoz/frontend/src/components/CustomTimePicker/RangePickerModal.tsx @@ -0,0 +1,68 @@ +import './RangePickerModal.styles.scss'; + +import { DatePicker } from 'antd'; +import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal'; +import { LexicalContext } from 'container/TopNav/DateTimeSelectionV2/config'; +import dayjs, { Dayjs } from 'dayjs'; +import { Dispatch, SetStateAction } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +interface RangePickerModalProps { + setCustomDTPickerVisible: Dispatch>; + setIsOpen: Dispatch>; + onCustomDateHandler: ( + dateTimeRange: DateTimeRangeType, + lexicalContext?: LexicalContext | undefined, + ) => void; + selectedTime: string; +} + +function RangePickerModal(props: RangePickerModalProps): JSX.Element { + const { + setCustomDTPickerVisible, + setIsOpen, + onCustomDateHandler, + selectedTime, + } = props; + const { RangePicker } = DatePicker; + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); + + const disabledDate = (current: Dayjs): boolean => { + const currentDay = dayjs(current); + return currentDay.isAfter(dayjs()); + }; + + const onPopoverClose = (visible: boolean): void => { + if (!visible) { + 
setCustomDTPickerVisible(false); + } + setIsOpen(visible); + }; + + const onModalOkHandler = (date_time: any): void => { + if (date_time?.[1]) { + onPopoverClose(false); + } + onCustomDateHandler(date_time, LexicalContext.CUSTOM_DATE_PICKER); + }; + return ( +
+ +
+ ); +} + +export default RangePickerModal; diff --git a/signoz/frontend/src/components/DraggableTableRow/index.tsx b/signoz/frontend/src/components/DraggableTableRow/index.tsx new file mode 100644 index 0000000..7c4a483 --- /dev/null +++ b/signoz/frontend/src/components/DraggableTableRow/index.tsx @@ -0,0 +1,54 @@ +import React, { useCallback, useRef } from 'react'; +import { useDrag, useDrop } from 'react-dnd'; + +import { dragHandler, dropHandler } from './utils'; + +const type = 'DraggableTableRow'; + +function DraggableTableRow({ + index, + moveRow, + className, + style, + ...restProps +}: DraggableTableRowProps): JSX.Element { + const ref = useRef(null); + + const handleDrop = useCallback( + (item: { index: number }) => { + if (moveRow) moveRow(item.index, index); + }, + [moveRow, index], + ); + + const [, drop] = useDrop({ + accept: type, + collect: dropHandler, + drop: handleDrop, + }); + + const [, drag] = useDrag({ + type, + item: { index }, + collect: dragHandler, + }); + drop(drag(ref)); + + return ( + + ); +} + +interface DraggableTableRowProps + extends React.HTMLAttributes { + index: number; + moveRow: (dragIndex: number, hoverIndex: number) => void; +} + +export default DraggableTableRow; diff --git a/signoz/frontend/src/components/DraggableTableRow/tests/DraggableTableRow.test.tsx b/signoz/frontend/src/components/DraggableTableRow/tests/DraggableTableRow.test.tsx new file mode 100644 index 0000000..f938a19 --- /dev/null +++ b/signoz/frontend/src/components/DraggableTableRow/tests/DraggableTableRow.test.tsx @@ -0,0 +1,38 @@ +import { render } from '@testing-library/react'; +import { Table } from 'antd'; +import { matchMedia } from 'container/PipelinePage/tests/AddNewPipeline.test'; +import { I18nextProvider } from 'react-i18next'; +import { Provider } from 'react-redux'; +import i18n from 'ReactI18'; +import store from 'store'; + +import DraggableTableRow from '..'; + +beforeAll(() => { + matchMedia(); +}); + +jest.mock('react-dnd', () => ({ + 
useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), + useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), +})); + +describe('DraggableTableRow Snapshot test', () => { + it('should render DraggableTableRow', async () => { + const { asFragment } = render( + + + + + , + ); + expect(asFragment()).toMatchSnapshot(); + }); +}); diff --git a/signoz/frontend/src/components/DraggableTableRow/tests/__snapshots__/DraggableTableRow.test.tsx.snap b/signoz/frontend/src/components/DraggableTableRow/tests/__snapshots__/DraggableTableRow.test.tsx.snap new file mode 100644 index 0000000..984d943 --- /dev/null +++ b/signoz/frontend/src/components/DraggableTableRow/tests/__snapshots__/DraggableTableRow.test.tsx.snap @@ -0,0 +1,103 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`DraggableTableRow Snapshot test should render DraggableTableRow 1`] = ` + +
+
+
+
+
+
+
+ + + + + + + + + + +
+
+
+
+ + + + + + + + + +
+
+ No data +
+
+
+ + + + + + + +`; + +exports[`PipelinePage container test should render AddNewPipeline section 1`] = ``; diff --git a/signoz/frontend/src/components/DraggableTableRow/tests/utils.test.ts b/signoz/frontend/src/components/DraggableTableRow/tests/utils.test.ts new file mode 100644 index 0000000..8085494 --- /dev/null +++ b/signoz/frontend/src/components/DraggableTableRow/tests/utils.test.ts @@ -0,0 +1,44 @@ +import { dragHandler, dropHandler } from '../utils'; + +jest.mock('react-dnd', () => ({ + useDrop: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), + useDrag: jest.fn().mockImplementation(() => [jest.fn(), jest.fn(), jest.fn()]), +})); + +describe('Utils testing of DraggableTableRow component', () => { + test('Should dropHandler return true', () => { + const monitor = { + isOver: jest.fn().mockReturnValueOnce(true), + } as never; + const dropDataTruthy = dropHandler(monitor); + + expect(dropDataTruthy).toEqual({ isOver: true }); + }); + + test('Should dropHandler return false', () => { + const monitor = { + isOver: jest.fn().mockReturnValueOnce(false), + } as never; + const dropDataFalsy = dropHandler(monitor); + + expect(dropDataFalsy).toEqual({ isOver: false }); + }); + + test('Should dragHandler return true', () => { + const monitor = { + isDragging: jest.fn().mockReturnValueOnce(true), + } as never; + const dragDataTruthy = dragHandler(monitor); + + expect(dragDataTruthy).toEqual({ isDragging: true }); + }); + + test('Should dragHandler return false', () => { + const monitor = { + isDragging: jest.fn().mockReturnValueOnce(false), + } as never; + const dragDataFalsy = dragHandler(monitor); + + expect(dragDataFalsy).toEqual({ isDragging: false }); + }); +}); diff --git a/signoz/frontend/src/components/DraggableTableRow/utils.ts b/signoz/frontend/src/components/DraggableTableRow/utils.ts new file mode 100644 index 0000000..475145f --- /dev/null +++ b/signoz/frontend/src/components/DraggableTableRow/utils.ts @@ -0,0 +1,15 @@ +import { 
DragSourceMonitor, DropTargetMonitor } from 'react-dnd'; + +export function dropHandler(monitor: DropTargetMonitor): { isOver: boolean } { + return { + isOver: monitor.isOver(), + }; +} + +export function dragHandler( + monitor: DragSourceMonitor, +): { isDragging: boolean } { + return { + isDragging: monitor.isDragging(), + }; +} diff --git a/signoz/frontend/src/components/DropDown/DropDown.styles.scss b/signoz/frontend/src/components/DropDown/DropDown.styles.scss new file mode 100644 index 0000000..232b349 --- /dev/null +++ b/signoz/frontend/src/components/DropDown/DropDown.styles.scss @@ -0,0 +1,11 @@ +.dropdown-button { + color: #fff; +} + +.dropdown-button--dark { + color: #000; +} + +.dropdown-icon { + font-size: 1.2rem; +} \ No newline at end of file diff --git a/signoz/frontend/src/components/DropDown/DropDown.tsx b/signoz/frontend/src/components/DropDown/DropDown.tsx new file mode 100644 index 0000000..e847e89 --- /dev/null +++ b/signoz/frontend/src/components/DropDown/DropDown.tsx @@ -0,0 +1,54 @@ +import './DropDown.styles.scss'; + +import { EllipsisOutlined } from '@ant-design/icons'; +import { Button, Dropdown, MenuProps } from 'antd'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useState } from 'react'; + +function DropDown({ + element, + onDropDownItemClick, +}: { + element: JSX.Element[]; + onDropDownItemClick?: MenuProps['onClick']; +}): JSX.Element { + const isDarkMode = useIsDarkMode(); + + const items: MenuProps['items'] = element.map( + (e: JSX.Element, index: number) => ({ + label: e, + key: index, + }), + ); + + const [isDdOpen, setDdOpen] = useState(false); + + return ( + setDdOpen(true), + onMouseLeave: (): void => setDdOpen(false), + onClick: (item): void => onDropDownItemClick?.(item), + }} + open={isDdOpen} + > + + + ); +} + +DropDown.defaultProps = { + onDropDownItemClick: (): void => {}, +}; + +export default DropDown; diff --git a/signoz/frontend/src/components/Editor/Editor.test.tsx 
b/signoz/frontend/src/components/Editor/Editor.test.tsx new file mode 100644 index 0000000..ec9e505 --- /dev/null +++ b/signoz/frontend/src/components/Editor/Editor.test.tsx @@ -0,0 +1,59 @@ +import { render, screen } from '@testing-library/react'; +import { useIsDarkMode } from 'hooks/useDarkMode'; + +import Editor from './index'; + +jest.mock('hooks/useDarkMode', () => ({ + useIsDarkMode: jest.fn(), +})); + +describe('Editor', () => { + it('renders correctly with default props', () => { + const { container } = render(); + expect(container).toMatchSnapshot(); + }); + + it('renders correctly with custom props', () => { + const customProps = { + value: 'test', + language: 'javascript', + readOnly: true, + height: '50vh', + options: { minimap: { enabled: false } }, + }; + const { container } = render( + , + ); + expect(container).toMatchSnapshot(); + }); + + it('renders with dark mode theme', () => { + (useIsDarkMode as jest.Mock).mockImplementation(() => true); + + const { container } = render(); + + expect(container).toMatchSnapshot(); + }); + + it('renders with light mode theme', () => { + (useIsDarkMode as jest.Mock).mockImplementation(() => false); + + const { container } = render(); + + expect(container).toMatchSnapshot(); + }); + + it('displays "Loading..." message initially', () => { + const { rerender } = render(); + + expect(screen.getByText('Loading...')).toBeInTheDocument(); + + rerender(); + }); +}); diff --git a/signoz/frontend/src/components/Editor/__snapshots__/Editor.test.tsx.snap b/signoz/frontend/src/components/Editor/__snapshots__/Editor.test.tsx.snap new file mode 100644 index 0000000..1670ced --- /dev/null +++ b/signoz/frontend/src/components/Editor/__snapshots__/Editor.test.tsx.snap @@ -0,0 +1,69 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Editor renders correctly with custom props 1`] = ` +
+
+
+ Loading... +
+
+
+
+`; + +exports[`Editor renders correctly with default props 1`] = ` +
+
+
+ Loading... +
+
+
+
+`; + +exports[`Editor renders with dark mode theme 1`] = ` +
+
+
+ Loading... +
+
+
+
+`; + +exports[`Editor renders with light mode theme 1`] = ` +
+
+
+ Loading... +
+
+
+
+`; diff --git a/signoz/frontend/src/components/Editor/index.tsx b/signoz/frontend/src/components/Editor/index.tsx new file mode 100644 index 0000000..5f70d92 --- /dev/null +++ b/signoz/frontend/src/components/Editor/index.tsx @@ -0,0 +1,56 @@ +import MEditor, { EditorProps } from '@monaco-editor/react'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useMemo } from 'react'; + +function Editor({ + value, + language, + onChange, + readOnly, + height, + options, +}: MEditorProps): JSX.Element { + const isDarkMode = useIsDarkMode(); + + const onChangeHandler = (newValue?: string): void => { + if (readOnly) return; + + if (typeof newValue === 'string' && onChange) onChange(newValue); + }; + + const editorOptions = useMemo( + () => ({ fontSize: 16, automaticLayout: true, readOnly, ...options }), + [options, readOnly], + ); + + return ( + + ); +} + +interface MEditorProps { + value: string; + language?: string; + onChange?: (value: string) => void; + readOnly?: boolean; + height?: string; + options?: EditorProps['options']; +} + +Editor.defaultProps = { + language: 'yaml', + readOnly: false, + height: '40vh', + options: {}, + onChange: (): void => {}, +}; + +export default Editor; diff --git a/signoz/frontend/src/components/ExplorerCard/ExplorerCard.tsx b/signoz/frontend/src/components/ExplorerCard/ExplorerCard.tsx new file mode 100644 index 0000000..73d7a24 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/ExplorerCard.tsx @@ -0,0 +1,252 @@ +import { + DeleteOutlined, + MoreOutlined, + SaveOutlined, + ShareAltOutlined, +} from '@ant-design/icons'; +import { + Button, + Col, + Dropdown, + MenuProps, + Popover, + Row, + Select, + Space, + Typography, +} from 'antd'; +import axios from 'axios'; +import TextToolTip from 'components/TextToolTip'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { QueryParams } from 'constants/query'; +import { useGetSearchQueryParam } from 'hooks/queryBuilder/useGetSearchQueryParam'; +import { 
useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useDeleteView } from 'hooks/saveViews/useDeleteView'; +import { useGetAllViews } from 'hooks/saveViews/useGetAllViews'; +import { useUpdateView } from 'hooks/saveViews/useUpdateView'; +import useErrorNotification from 'hooks/useErrorNotification'; +import { useNotifications } from 'hooks/useNotifications'; +import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery'; +import { useState } from 'react'; +import { useCopyToClipboard } from 'react-use'; +import { popupContainer } from 'utils/selectPopupContainer'; + +import { ExploreHeaderToolTip, SaveButtonText } from './constants'; +import MenuItemGenerator from './MenuItemGenerator'; +import SaveViewWithName from './SaveViewWithName'; +import { + DropDownOverlay, + ExplorerCardHeadContainer, + OffSetCol, +} from './styles'; +import { ExplorerCardProps } from './types'; +import { deleteViewHandler } from './utils'; + +function ExplorerCard({ + sourcepage, + children, +}: ExplorerCardProps): JSX.Element { + const [isOpen, setIsOpen] = useState(false); + const [, setCopyUrl] = useCopyToClipboard(); + const { notifications } = useNotifications(); + + const onCopyUrlHandler = (): void => { + setCopyUrl(window.location.href); + notifications.success({ + message: 'Copied to clipboard', + }); + }; + + const { + currentQuery, + panelType, + redirectWithQueryBuilderData, + updateAllQueriesOperators, + isStagedQueryUpdated, + } = useQueryBuilder(); + + const { + data: viewsData, + isLoading, + error, + isRefetching, + refetch: refetchAllView, + } = useGetAllViews(sourcepage); + + useErrorNotification(error); + + const handleOpenChange = (newOpen = false): void => { + setIsOpen(newOpen); + }; + + const viewName = useGetSearchQueryParam(QueryParams.viewName) || ''; + + const viewKey = useGetSearchQueryParam(QueryParams.viewKey) || ''; + + const isQueryUpdated = isStagedQueryUpdated(viewsData?.data?.data, 
viewKey); + + const { mutateAsync: updateViewAsync } = useUpdateView({ + compositeQuery: mapCompositeQueryFromQuery(currentQuery, panelType), + viewKey, + extraData: '', + sourcePage: sourcepage, + viewName, + }); + + const { mutateAsync: deleteViewAsync } = useDeleteView(viewKey); + + const showErrorNotification = (err: Error): void => { + notifications.error({ + message: axios.isAxiosError(err) ? err.message : SOMETHING_WENT_WRONG, + }); + }; + + const onDeleteHandler = (): void => + deleteViewHandler({ + deleteViewAsync, + notifications, + panelType, + redirectWithQueryBuilderData, + refetchAllView, + viewId: viewKey, + viewKey, + updateAllQueriesOperators, + sourcePage: sourcepage, + }); + + const onUpdateQueryHandler = (): void => { + updateViewAsync( + { + compositeQuery: mapCompositeQueryFromQuery(currentQuery, panelType), + viewKey, + extraData: '', + sourcePage: sourcepage, + viewName, + }, + { + onSuccess: () => { + notifications.success({ + message: 'View Updated Successfully', + }); + refetchAllView(); + }, + onError: (err) => { + showErrorNotification(err); + }, + }, + ); + }; + + const moreOptionMenu: MenuProps = { + items: [ + { + key: 'delete', + label: Delete, + onClick: onDeleteHandler, + icon: , + }, + ], + }; + + const saveButtonType = isQueryUpdated ? 'default' : 'primary'; + const saveButtonIcon = isQueryUpdated ? null : ; + + const showSaveView = false; + + return ( + <> + {showSaveView && ( + + + + + Query Builder + + + + + + {viewsData?.data.data && viewsData?.data.data.length && ( + + + + )} + {isQueryUpdated && ( + + )} + + } + showArrow={false} + open={isOpen} + onOpenChange={handleOpenChange} + > + + + + {viewKey && ( + + + + )} + + + + + )} + +
{children}
+ + ); +} + +export default ExplorerCard; diff --git a/signoz/frontend/src/components/ExplorerCard/MenuItemGenerator.tsx b/signoz/frontend/src/components/ExplorerCard/MenuItemGenerator.tsx new file mode 100644 index 0000000..c908e70 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/MenuItemGenerator.tsx @@ -0,0 +1,103 @@ +import { DeleteOutlined } from '@ant-design/icons'; +import { Col, Row, Tooltip, Typography } from 'antd'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useDeleteView } from 'hooks/saveViews/useDeleteView'; +import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; +import { useNotifications } from 'hooks/useNotifications'; +import { MouseEvent, useCallback } from 'react'; + +import { MenuItemContainer } from './styles'; +import { MenuItemLabelGeneratorProps } from './types'; +import { + deleteViewHandler, + getViewDetailsUsingViewKey, + trimViewName, +} from './utils'; + +function MenuItemGenerator({ + viewName, + viewKey, + createdBy, + uuid, + viewData, + refetchAllView, + sourcePage, +}: MenuItemLabelGeneratorProps): JSX.Element { + const { + panelType, + redirectWithQueryBuilderData, + updateAllQueriesOperators, + } = useQueryBuilder(); + const { handleExplorerTabChange } = useHandleExplorerTabChange(); + + const { notifications } = useNotifications(); + + const { mutateAsync: deleteViewAsync } = useDeleteView(uuid); + + const onDeleteHandler = (event: MouseEvent): void => { + event.stopPropagation(); + deleteViewHandler({ + deleteViewAsync, + notifications, + panelType, + redirectWithQueryBuilderData, + refetchAllView, + viewId: uuid, + viewKey, + updateAllQueriesOperators, + sourcePage, + }); + }; + + const onMenuItemSelectHandler = useCallback( + ({ key }: { key: string }): void => { + const currentViewDetails = getViewDetailsUsingViewKey(key, viewData); + if (!currentViewDetails) return; + const { + query, + name, + uuid, + panelType: currentPanelType, + } = 
currentViewDetails; + + handleExplorerTabChange(currentPanelType, { + query, + name, + uuid, + }); + }, + [viewData, handleExplorerTabChange], + ); + + const onLabelClickHandler = (): void => { + onMenuItemSelectHandler({ + key: uuid, + }); + }; + + const newViewName = trimViewName(viewName); + + return ( + + + + + + {newViewName} + + + + Created by {createdBy} + + + + + + + + + + ); +} + +export default MenuItemGenerator; diff --git a/signoz/frontend/src/components/ExplorerCard/SaveViewWithName.tsx b/signoz/frontend/src/components/ExplorerCard/SaveViewWithName.tsx new file mode 100644 index 0000000..cb5457c --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/SaveViewWithName.tsx @@ -0,0 +1,80 @@ +import { Card, Form, Input, Typography } from 'antd'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useSaveView } from 'hooks/saveViews/useSaveView'; +import { useNotifications } from 'hooks/useNotifications'; +import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery'; +import { useTranslation } from 'react-i18next'; + +import { SaveButton } from './styles'; +import { SaveViewFormProps, SaveViewWithNameProps } from './types'; +import { saveViewHandler } from './utils'; + +function SaveViewWithName({ + sourcePage, + handlePopOverClose, + refetchAllView, +}: SaveViewWithNameProps): JSX.Element { + const [form] = Form.useForm(); + const { t } = useTranslation(['explorer']); + const { + currentQuery, + panelType, + redirectWithQueryBuilderData, + } = useQueryBuilder(); + const { notifications } = useNotifications(); + const compositeQuery = mapCompositeQueryFromQuery(currentQuery, panelType); + + const { isLoading, mutateAsync: saveViewAsync } = useSaveView({ + viewName: form.getFieldValue('viewName'), + compositeQuery, + sourcePage, + extraData: '', + }); + + const onSaveHandler = (): void => { + saveViewHandler({ + 
compositeQuery, + handlePopOverClose, + extraData: '', + notifications, + panelType: panelType || PANEL_TYPES.LIST, + redirectWithQueryBuilderData, + refetchAllView, + saveViewAsync, + sourcePage, + viewName: form.getFieldValue('viewName'), + form, + }); + }; + + return ( + + {t('name_of_the_view')} +
+ + + + + Save + +
+
+ ); +} + +export default SaveViewWithName; diff --git a/signoz/frontend/src/components/ExplorerCard/__mock__/viewData.ts b/signoz/frontend/src/components/ExplorerCard/__mock__/viewData.ts new file mode 100644 index 0000000..5423cc9 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/__mock__/viewData.ts @@ -0,0 +1,32 @@ +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { ViewProps } from 'types/api/saveViews/types'; +import { DataSource } from 'types/common/queryBuilder'; + +export const viewMockData: ViewProps[] = [ + { + uuid: 'view1', + name: 'View 1', + createdBy: 'User 1', + category: 'category 1', + compositeQuery: {} as ICompositeMetricQuery, + createdAt: '2021-07-07T06:31:00.000Z', + updatedAt: '2021-07-07T06:33:00.000Z', + extraData: '', + sourcePage: DataSource.TRACES, + tags: [], + updatedBy: 'User 1', + }, + { + uuid: 'view2', + name: 'View 2', + createdBy: 'User 2', + category: 'category 2', + compositeQuery: {} as ICompositeMetricQuery, + createdAt: '2021-07-07T06:30:00.000Z', + updatedAt: '2021-07-07T06:30:00.000Z', + extraData: '', + sourcePage: DataSource.TRACES, + tags: [], + updatedBy: 'User 2', + }, +]; diff --git a/signoz/frontend/src/components/ExplorerCard/constants.ts b/signoz/frontend/src/components/ExplorerCard/constants.ts new file mode 100644 index 0000000..ee06168 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/constants.ts @@ -0,0 +1,14 @@ +import { QueryParams } from 'constants/query'; + +export const ExploreHeaderToolTip = { + url: + 'https://signoz.io/docs/userguide/query-builder/?utm_source=product&utm_medium=new-query-builder', + text: 'More details on how to use query builder', +}; + +export const SaveButtonText = { + SAVE_AS_NEW_VIEW: 'Save as new view', + SAVE_VIEW: 'Save view', +}; + +export type QuerySearchParamNames = QueryParams.viewName | QueryParams.viewKey; diff --git a/signoz/frontend/src/components/ExplorerCard/styles.ts 
b/signoz/frontend/src/components/ExplorerCard/styles.ts new file mode 100644 index 0000000..56c3d38 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/styles.ts @@ -0,0 +1,29 @@ +import { Button, Card, Col } from 'antd'; +import styled, { CSSProperties } from 'styled-components'; + +export const ExplorerCardHeadContainer = styled(Card)` + margin: 1rem 0; + padding: 0; +`; + +export const OffSetCol = styled(Col)` + text-align: right; +`; + +export const SaveButton = styled(Button)` + &&& { + margin: 1rem 0; + width: 5rem; + } +`; + +export const DropDownOverlay: CSSProperties = { + maxHeight: '20rem', + overflowY: 'auto', + width: '20rem', + padding: 0, +}; + +export const MenuItemContainer = styled(Card)` + padding: 0; +`; diff --git a/signoz/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx b/signoz/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx new file mode 100644 index 0000000..1fdb29d --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/test/ExplorerCard.test.tsx @@ -0,0 +1,60 @@ +import { render, screen } from '@testing-library/react'; +import ROUTES from 'constants/routes'; +import MockQueryClientProvider from 'providers/test/MockQueryClientProvider'; +import { DataSource } from 'types/common/queryBuilder'; + +import { viewMockData } from '../__mock__/viewData'; +import ExplorerCard from '../ExplorerCard'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.TRACES_EXPLORER}/`, + }), +})); + +jest.mock('hooks/queryBuilder/useGetPanelTypesQueryParam', () => ({ + useGetPanelTypesQueryParam: jest.fn(() => 'mockedPanelType'), +})); + +jest.mock('hooks/saveViews/useGetAllViews', () => ({ + useGetAllViews: jest.fn(() => ({ + data: { data: { data: viewMockData } }, + isLoading: false, + error: null, + isRefetching: false, + refetch: jest.fn(), + })), +})); + 
+jest.mock('hooks/saveViews/useUpdateView', () => ({ + useUpdateView: jest.fn(() => ({ + mutateAsync: jest.fn(), + })), +})); + +jest.mock('hooks/saveViews/useDeleteView', () => ({ + useDeleteView: jest.fn(() => ({ + mutateAsync: jest.fn(), + })), +})); + +describe('ExplorerCard', () => { + it('renders a card with a title and a description', () => { + render( + + child + , + ); + expect(screen.queryByText('Query Builder')).not.toBeInTheDocument(); + }); + + it('renders a save view button', () => { + render( + + child + , + ); + expect(screen.queryByText('Save view')).not.toBeInTheDocument(); + }); +}); diff --git a/signoz/frontend/src/components/ExplorerCard/test/MenuItemGenerator.test.tsx b/signoz/frontend/src/components/ExplorerCard/test/MenuItemGenerator.test.tsx new file mode 100644 index 0000000..c869024 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/test/MenuItemGenerator.test.tsx @@ -0,0 +1,62 @@ +import { render, screen } from '@testing-library/react'; +import ROUTES from 'constants/routes'; +import MockQueryClientProvider from 'providers/test/MockQueryClientProvider'; +import { DataSource } from 'types/common/queryBuilder'; + +import { viewMockData } from '../__mock__/viewData'; +import MenuItemGenerator from '../MenuItemGenerator'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.APPLICATION}/`, + }), +})); + +jest.mock('antd/es/form/Form', () => ({ + useForm: jest.fn().mockReturnValue({ + onFinish: jest.fn(), + }), +})); + +describe('MenuItemGenerator', () => { + it('should render MenuItemGenerator component', () => { + const screen = render( + + + , + ); + + expect(screen.getByText(viewMockData[0].name)).toBeInTheDocument(); + }); + + it('should call onMenuItemSelectHandler on click of MenuItemGenerator', () => { + render( + + + , + ); + + const spanElement = screen.getByRole('img', { + name: 
'delete', + }); + + expect(spanElement).toBeInTheDocument(); + }); +}); diff --git a/signoz/frontend/src/components/ExplorerCard/test/SaveViewWithName.test.tsx b/signoz/frontend/src/components/ExplorerCard/test/SaveViewWithName.test.tsx new file mode 100644 index 0000000..8e6664a --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/test/SaveViewWithName.test.tsx @@ -0,0 +1,63 @@ +import { fireEvent, render } from '@testing-library/react'; +import ROUTES from 'constants/routes'; +import { QueryClient, QueryClientProvider } from 'react-query'; +import { DataSource } from 'types/common/queryBuilder'; + +import SaveViewWithName from '../SaveViewWithName'; + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.APPLICATION}/`, + }), +})); + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + }, + }, +}); + +jest.mock('hooks/queryBuilder/useGetPanelTypesQueryParam', () => ({ + useGetPanelTypesQueryParam: jest.fn(() => 'mockedPanelType'), +})); + +jest.mock('hooks/saveViews/useSaveView', () => ({ + useSaveView: jest.fn(() => ({ + mutateAsync: jest.fn(), + })), +})); + +describe('SaveViewWithName', () => { + it('should render SaveViewWithName component', () => { + const screen = render( + + + , + ); + + expect(screen.getByText('Save')).toBeInTheDocument(); + }); + + it('should call saveViewAsync on click of Save button', () => { + const screen = render( + + + , + ); + + fireEvent.click(screen.getByText('Save')); + + expect(screen.getByText('Save')).toBeInTheDocument(); + }); +}); diff --git a/signoz/frontend/src/components/ExplorerCard/types.ts b/signoz/frontend/src/components/ExplorerCard/types.ts new file mode 100644 index 0000000..9f4eed3 --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/types.ts @@ -0,0 +1,84 @@ +import { FormInstance } from 'antd'; +import { 
NotificationInstance } from 'antd/es/notification/interface'; +import { AxiosResponse } from 'axios'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { UseMutateAsyncFunction } from 'react-query'; +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { + DeleteViewPayloadProps, + SaveViewPayloadProps, + SaveViewProps, + ViewProps, +} from 'types/api/saveViews/types'; +import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder'; + +export interface ExplorerCardProps { + sourcepage: DataSource; + children: React.ReactNode; +} + +export type GetViewDetailsUsingViewKey = ( + viewKey: string, + data: ViewProps[] | undefined, +) => + | { query: Query; name: string; uuid: string; panelType: PANEL_TYPES } + | undefined; + +export interface IsQueryUpdatedInViewProps { + viewKey: string; + data: ViewProps[] | undefined; + stagedQuery: Query | null; + currentPanelType: PANEL_TYPES | null; +} + +export interface SaveViewWithNameProps { + sourcePage: ExplorerCardProps['sourcepage']; + handlePopOverClose: VoidFunction; + refetchAllView: VoidFunction; +} + +export interface SaveViewFormProps { + viewName: string; +} + +export interface MenuItemLabelGeneratorProps { + viewName: string; + viewKey: string; + createdBy: string; + uuid: string; + viewData: ViewProps[]; + refetchAllView: VoidFunction; + sourcePage: ExplorerCardProps['sourcepage']; +} + +export interface SaveViewHandlerProps { + viewName: string; + compositeQuery: ICompositeMetricQuery; + sourcePage: ExplorerCardProps['sourcepage']; + extraData: string; + panelType: PANEL_TYPES | null; + notifications: NotificationInstance; + refetchAllView: SaveViewWithNameProps['refetchAllView']; + saveViewAsync: UseMutateAsyncFunction< + AxiosResponse, + Error, + SaveViewProps, + SaveViewPayloadProps + >; + handlePopOverClose: SaveViewWithNameProps['handlePopOverClose']; + redirectWithQueryBuilderData: 
QueryBuilderContextType['redirectWithQueryBuilderData']; + form: FormInstance; +} + +export interface DeleteViewHandlerProps { + deleteViewAsync: UseMutateAsyncFunction; + refetchAllView: MenuItemLabelGeneratorProps['refetchAllView']; + redirectWithQueryBuilderData: QueryBuilderContextType['redirectWithQueryBuilderData']; + notifications: NotificationInstance; + panelType: PANEL_TYPES | null; + viewKey: string; + viewId: string; + updateAllQueriesOperators: QueryBuilderContextType['updateAllQueriesOperators']; + sourcePage: ExplorerCardProps['sourcepage']; +} diff --git a/signoz/frontend/src/components/ExplorerCard/utils.ts b/signoz/frontend/src/components/ExplorerCard/utils.ts new file mode 100644 index 0000000..48af06b --- /dev/null +++ b/signoz/frontend/src/components/ExplorerCard/utils.ts @@ -0,0 +1,195 @@ +import { NotificationInstance } from 'antd/es/notification/interface'; +import axios from 'axios'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { QueryParams } from 'constants/query'; +import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; +import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; +import isEqual from 'lodash-es/isEqual'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; + +import { + DeleteViewHandlerProps, + GetViewDetailsUsingViewKey, + IsQueryUpdatedInViewProps, + SaveViewHandlerProps, +} from './types'; + +export const showErrorNotification = ( + notifications: NotificationInstance, + err: Error, +): void => { + notifications.error({ + message: axios.isAxiosError(err) ? 
err.message : SOMETHING_WENT_WRONG, + }); +}; + +export const getViewDetailsUsingViewKey: GetViewDetailsUsingViewKey = ( + viewKey, + data, +) => { + const selectedView = data?.find((view) => view.uuid === viewKey); + if (selectedView) { + const { compositeQuery, name, uuid } = selectedView; + const query = mapQueryDataFromApi(compositeQuery); + return { query, name, uuid, panelType: compositeQuery.panelType }; + } + return undefined; +}; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const omitIdFromQuery = (query: Query | null): any => ({ + ...query, + builder: { + ...query?.builder, + queryData: query?.builder.queryData.map((queryData) => { + const { id, ...rest } = queryData.aggregateAttribute; + const newAggregateAttribute = rest; + const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => { + const { id, ...rest } = groupByAttribute; + return rest; + }); + const newItems = queryData.filters.items.map((item) => { + const { id, ...newItem } = item; + if (item.key) { + const { id, ...rest } = item.key; + return { + ...newItem, + key: rest, + }; + } + return newItem; + }); + return { + ...queryData, + aggregateAttribute: newAggregateAttribute, + groupBy: newGroupByAttributes, + filters: { + ...queryData.filters, + items: newItems, + }, + limit: queryData.limit ? queryData.limit : 0, + offset: queryData.offset ? queryData.offset : 0, + pageSize: queryData.pageSize ? 
queryData.pageSize : 0, + }; + }), + }, +}); + +export const isQueryUpdatedInView = ({ + viewKey, + data, + stagedQuery, + currentPanelType, +}: IsQueryUpdatedInViewProps): boolean => { + const currentViewDetails = getViewDetailsUsingViewKey(viewKey, data); + if (!currentViewDetails) { + return false; + } + const { query, panelType } = currentViewDetails; + + // Omitting id from aggregateAttribute and groupBy + const updatedCurrentQuery = omitIdFromQuery(stagedQuery); + + if ( + updatedCurrentQuery?.builder === undefined || + updatedCurrentQuery.clickhouse_sql === undefined || + updatedCurrentQuery.promql === undefined + ) { + return false; + } + + return ( + panelType !== currentPanelType || + !isEqual(query.builder, updatedCurrentQuery?.builder) || + !isEqual(query.clickhouse_sql, updatedCurrentQuery?.clickhouse_sql) || + !isEqual(query.promql, updatedCurrentQuery?.promql) + ); +}; + +export const saveViewHandler = ({ + saveViewAsync, + refetchAllView, + notifications, + handlePopOverClose, + viewName, + compositeQuery, + sourcePage, + extraData, + redirectWithQueryBuilderData, + panelType, + form, +}: SaveViewHandlerProps): void => { + saveViewAsync( + { + viewName, + compositeQuery, + sourcePage, + extraData, + }, + { + onSuccess: (data) => { + refetchAllView(); + redirectWithQueryBuilderData(mapQueryDataFromApi(compositeQuery), { + [QueryParams.panelTypes]: panelType, + [QueryParams.viewName]: viewName, + [QueryParams.viewKey]: data.data.data, + }); + notifications.success({ + message: 'View Saved Successfully', + }); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + onSettled: () => { + handlePopOverClose(); + form.resetFields(); + }, + }, + ); +}; + +export const deleteViewHandler = ({ + deleteViewAsync, + refetchAllView, + redirectWithQueryBuilderData, + notifications, + panelType, + viewKey, + viewId, + updateAllQueriesOperators, + sourcePage, +}: DeleteViewHandlerProps): void => { + deleteViewAsync(viewKey, { + onSuccess: () 
=> { + if (viewId === viewKey) { + redirectWithQueryBuilderData( + updateAllQueriesOperators( + initialQueriesMap[sourcePage], + panelType || PANEL_TYPES.LIST, + sourcePage, + ), + { + [QueryParams.viewName]: '', + [QueryParams.panelTypes]: panelType, + [QueryParams.viewKey]: '', + }, + ); + } + notifications.success({ + message: 'View Deleted Successfully', + }); + refetchAllView(); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + }); +}; + +export const trimViewName = (viewName: string): string => { + if (viewName.length > 20) { + return `${viewName.substring(0, 20)}...`; + } + return viewName; +}; diff --git a/signoz/frontend/src/components/Graph/Plugin/DragSelect.ts b/signoz/frontend/src/components/Graph/Plugin/DragSelect.ts new file mode 100644 index 0000000..400b870 --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/DragSelect.ts @@ -0,0 +1,321 @@ +import { Chart, ChartTypeRegistry, Plugin } from 'chart.js'; +import { getRelativePosition } from 'chart.js/helpers'; + +// utils +import { ChartEventHandler, mergeDefaultOptions } from './utils'; + +export const dragSelectPluginId = 'drag-select-plugin'; + +type ChartDragHandlers = { + mousedown: ChartEventHandler; + mousemove: ChartEventHandler; + mouseup: ChartEventHandler; + globalMouseup: () => void; +}; + +export type DragSelectPluginOptions = { + color?: string; + onSelect?: (startValueX: number, endValueX: number) => void; +}; + +const defaultDragSelectPluginOptions: Required = { + color: 'rgba(0, 0, 0, 0.5)', + onSelect: () => {}, +}; + +export function createDragSelectPluginOptions( + isEnabled: boolean, + onSelect?: (start: number, end: number) => void, + color?: string, +): DragSelectPluginOptions | false { + if (!isEnabled) { + return false; + } + + return { + onSelect, + color, + }; +} + +function createMousedownHandler( + chart: Chart, + dragData: DragSelectData, +): ChartEventHandler { + return (ev): void => { + const { left, right } = chart.chartArea; + + 
let { x: startDragPositionX } = getRelativePosition(ev, chart); + + if (left > startDragPositionX) { + startDragPositionX = left; + } + + if (right < startDragPositionX) { + startDragPositionX = right; + } + + const startValuePositionX = chart.scales.x.getValueForPixel( + startDragPositionX, + ); + + dragData.onDragStart(startDragPositionX, startValuePositionX); + }; +} + +function createMousemoveHandler( + chart: Chart, + dragData: DragSelectData, +): ChartEventHandler { + return (ev): void => { + if (!dragData.isMouseDown) { + return; + } + + const { left, right } = chart.chartArea; + + let { x: dragPositionX } = getRelativePosition(ev, chart); + + if (left > dragPositionX) { + dragPositionX = left; + } + + if (right < dragPositionX) { + dragPositionX = right; + } + + const valuePositionX = chart.scales.x.getValueForPixel(dragPositionX); + + dragData.onDrag(dragPositionX, valuePositionX); + chart.update('none'); + }; +} + +function createMouseupHandler( + chart: Chart, + options: DragSelectPluginOptions, + dragData: DragSelectData, +): ChartEventHandler { + return (ev): void => { + const { left, right } = chart.chartArea; + + let { x: endRelativePostionX } = getRelativePosition(ev, chart); + + if (left > endRelativePostionX) { + endRelativePostionX = left; + } + + if (right < endRelativePostionX) { + endRelativePostionX = right; + } + + const endValuePositionX = chart.scales.x.getValueForPixel( + endRelativePostionX, + ); + + dragData.onDragEnd(endRelativePostionX, endValuePositionX); + + chart.update('none'); + + if ( + typeof options.onSelect === 'function' && + typeof dragData.startValuePositionX === 'number' && + typeof dragData.endValuePositionX === 'number' + ) { + const start = Math.min( + dragData.startValuePositionX, + dragData.endValuePositionX, + ); + const end = Math.max( + dragData.startValuePositionX, + dragData.endValuePositionX, + ); + + options.onSelect(start, end); + } + }; +} + +function createGlobalMouseupHandler( + options: 
DragSelectPluginOptions, + dragData: DragSelectData, +): () => void { + return (): void => { + const { isDragging, endRelativePixelPositionX, endValuePositionX } = dragData; + + if (!isDragging) { + return; + } + + dragData.onDragEnd( + endRelativePixelPositionX as number, + endValuePositionX as number, + ); + + if ( + typeof options.onSelect === 'function' && + typeof dragData.startValuePositionX === 'number' && + typeof dragData.endValuePositionX === 'number' + ) { + const start = Math.min( + dragData.startValuePositionX, + dragData.endValuePositionX, + ); + const end = Math.max( + dragData.startValuePositionX, + dragData.endValuePositionX, + ); + + options.onSelect(start, end); + } + }; +} + +class DragSelectData { + public isDragging = false; + + public isMouseDown = false; + + public startRelativePixelPositionX: number | null = null; + + public startValuePositionX: number | null | undefined = null; + + public endRelativePixelPositionX: number | null = null; + + public endValuePositionX: number | null | undefined = null; + + public initialize(): void { + this.isDragging = false; + this.isMouseDown = false; + this.startRelativePixelPositionX = null; + this.startValuePositionX = null; + this.endRelativePixelPositionX = null; + this.endValuePositionX = null; + } + + public onDragStart( + startRelativePixelPositionX: number, + startValuePositionX: number | undefined, + ): void { + this.isDragging = false; + this.isMouseDown = true; + this.startRelativePixelPositionX = startRelativePixelPositionX; + this.startValuePositionX = startValuePositionX; + this.endRelativePixelPositionX = null; + this.endValuePositionX = null; + } + + public onDrag( + endRelativePixelPositionX: number, + endValuePositionX: number | undefined, + ): void { + this.isDragging = true; + this.endRelativePixelPositionX = endRelativePixelPositionX; + this.endValuePositionX = endValuePositionX; + } + + public onDragEnd( + endRelativePixelPositionX: number, + endValuePositionX: number | undefined, + 
): void { + if (!this.isDragging) { + this.initialize(); + return; + } + + this.isDragging = false; + this.isMouseDown = false; + this.endRelativePixelPositionX = endRelativePixelPositionX; + this.endValuePositionX = endValuePositionX; + } +} + +export const createDragSelectPlugin = (): Plugin< + keyof ChartTypeRegistry, + DragSelectPluginOptions +> => { + const dragData = new DragSelectData(); + let pluginOptions: Required; + + const handlers: ChartDragHandlers = { + mousedown: () => {}, + mousemove: () => {}, + mouseup: () => {}, + globalMouseup: () => {}, + }; + + const dragSelectPlugin: Plugin< + keyof ChartTypeRegistry, + DragSelectPluginOptions + > = { + id: dragSelectPluginId, + start: (chart: Chart, _, passedOptions) => { + pluginOptions = mergeDefaultOptions( + passedOptions, + defaultDragSelectPluginOptions, + ); + + const { canvas } = chart; + + dragData.initialize(); + + const mousedownHandler = createMousedownHandler(chart, dragData); + const mousemoveHandler = createMousemoveHandler(chart, dragData); + const mouseupHandler = createMouseupHandler(chart, pluginOptions, dragData); + const globalMouseupHandler = createGlobalMouseupHandler( + pluginOptions, + dragData, + ); + + canvas.addEventListener('mousedown', mousedownHandler, { passive: true }); + canvas.addEventListener('mousemove', mousemoveHandler, { passive: true }); + canvas.addEventListener('mouseup', mouseupHandler, { passive: true }); + document.addEventListener('mouseup', globalMouseupHandler, { + passive: true, + }); + + handlers.mousedown = mousedownHandler; + handlers.mousemove = mousemoveHandler; + handlers.mouseup = mouseupHandler; + handlers.globalMouseup = globalMouseupHandler; + }, + beforeDestroy: (chart: Chart) => { + const { canvas } = chart; + + if (!canvas) { + return; + } + + canvas.removeEventListener('mousedown', handlers.mousedown); + canvas.removeEventListener('mousemove', handlers.mousemove); + canvas.removeEventListener('mouseup', handlers.mouseup); + 
document.removeEventListener('mouseup', handlers.globalMouseup); + }, + afterDatasetsDraw: (chart: Chart) => { + const { + startRelativePixelPositionX, + endRelativePixelPositionX, + isDragging, + } = dragData; + + if (startRelativePixelPositionX && endRelativePixelPositionX && isDragging) { + const left = Math.min( + startRelativePixelPositionX, + endRelativePixelPositionX, + ); + const right = Math.max( + startRelativePixelPositionX, + endRelativePixelPositionX, + ); + const top = chart.chartArea.top - 5; + const bottom = chart.chartArea.bottom + 5; + + /* eslint-disable-next-line no-param-reassign */ + chart.ctx.fillStyle = pluginOptions.color; + chart.ctx.fillRect(left, top, right - left, bottom - top); + } + }, + }; + + return dragSelectPlugin; +}; diff --git a/signoz/frontend/src/components/Graph/Plugin/EmptyGraph.ts b/signoz/frontend/src/components/Graph/Plugin/EmptyGraph.ts new file mode 100644 index 0000000..ab008ca --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/EmptyGraph.ts @@ -0,0 +1,17 @@ +import { grey } from '@ant-design/colors'; +import { Chart } from 'chart.js'; + +export const emptyGraph = { + id: 'emptyChart', + afterDraw(chart: Chart): void { + const { height, width, ctx } = chart; + chart.clear(); + ctx.save(); + ctx.textAlign = 'center'; + ctx.textBaseline = 'middle'; + ctx.font = '1.5rem sans-serif'; + ctx.fillStyle = `${grey.primary}`; + ctx.fillText('No data', width / 2, height / 2); + ctx.restore(); + }, +}; diff --git a/signoz/frontend/src/components/Graph/Plugin/IntersectionCursor.ts b/signoz/frontend/src/components/Graph/Plugin/IntersectionCursor.ts new file mode 100644 index 0000000..a3d9a4d --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/IntersectionCursor.ts @@ -0,0 +1,164 @@ +import { Chart, ChartEvent, ChartTypeRegistry, Plugin } from 'chart.js'; +import { getRelativePosition } from 'chart.js/helpers'; + +// utils +import { ChartEventHandler, mergeDefaultOptions } from './utils'; + +export const 
intersectionCursorPluginId = 'intersection-cursor-plugin'; + +export type IntersectionCursorPluginOptions = { + color?: string; + dashSize?: number; + gapSize?: number; +}; + +export const defaultIntersectionCursorPluginOptions: Required = { + color: 'white', + dashSize: 3, + gapSize: 3, +}; + +export function createIntersectionCursorPluginOptions( + isEnabled: boolean, + color?: string, + dashSize?: number, + gapSize?: number, +): IntersectionCursorPluginOptions | false { + if (!isEnabled) { + return false; + } + + return { + color, + dashSize, + gapSize, + }; +} + +function createMousemoveHandler( + chart: Chart, + cursorData: IntersectionCursorData, +): ChartEventHandler { + return (ev: ChartEvent | MouseEvent): void => { + const { left, right, top, bottom } = chart.chartArea; + + let { x, y } = getRelativePosition(ev, chart); + + if (left > x) { + x = left; + } + + if (right < x) { + x = right; + } + + if (y < top) { + y = top; + } + + if (y > bottom) { + y = bottom; + } + + cursorData.onMouseMove(x, y); + }; +} + +function createMouseoutHandler( + cursorData: IntersectionCursorData, +): ChartEventHandler { + return (): void => { + cursorData.onMouseOut(); + }; +} + +class IntersectionCursorData { + public positionX: number | null | undefined; + + public positionY: number | null | undefined; + + public initialize(): void { + this.positionX = null; + this.positionY = null; + } + + public onMouseMove(x: number | undefined, y: number | undefined): void { + this.positionX = x; + this.positionY = y; + } + + public onMouseOut(): void { + this.positionX = null; + this.positionY = null; + } +} + +export const createIntersectionCursorPlugin = (): Plugin< + keyof ChartTypeRegistry, + IntersectionCursorPluginOptions +> => { + const cursorData = new IntersectionCursorData(); + let pluginOptions: Required; + + let mousemoveHandler: (ev: ChartEvent | MouseEvent) => void; + let mouseoutHandler: (ev: ChartEvent | MouseEvent) => void; + + const intersectionCursorPlugin: Plugin< 
+ keyof ChartTypeRegistry, + IntersectionCursorPluginOptions + > = { + id: intersectionCursorPluginId, + start: (chart: Chart, _, passedOptions) => { + const { canvas } = chart; + + cursorData.initialize(); + pluginOptions = mergeDefaultOptions( + passedOptions, + defaultIntersectionCursorPluginOptions, + ); + + mousemoveHandler = createMousemoveHandler(chart, cursorData); + mouseoutHandler = createMouseoutHandler(cursorData); + + canvas.addEventListener('mousemove', mousemoveHandler, { passive: true }); + canvas.addEventListener('mouseout', mouseoutHandler, { passive: true }); + }, + beforeDestroy: (chart: Chart) => { + const { canvas } = chart; + + if (!canvas) { + return; + } + + canvas.removeEventListener('mousemove', mousemoveHandler); + canvas.removeEventListener('mouseout', mouseoutHandler); + }, + afterDatasetsDraw: (chart: Chart) => { + const { positionX, positionY } = cursorData; + + const lineDashData = [pluginOptions.dashSize, pluginOptions.gapSize]; + + if (typeof positionX === 'number' && typeof positionY === 'number') { + const { top, bottom, left, right } = chart.chartArea; + + chart.ctx.beginPath(); + /* eslint-disable-next-line no-param-reassign */ + chart.ctx.strokeStyle = pluginOptions.color; + chart.ctx.setLineDash(lineDashData); + chart.ctx.moveTo(left, positionY); + chart.ctx.lineTo(right, positionY); + chart.ctx.stroke(); + + chart.ctx.beginPath(); + chart.ctx.setLineDash(lineDashData); + /* eslint-disable-next-line no-param-reassign */ + chart.ctx.strokeStyle = pluginOptions.color; + chart.ctx.moveTo(positionX, top); + chart.ctx.lineTo(positionX, bottom); + chart.ctx.stroke(); + } + }, + }; + + return intersectionCursorPlugin; +}; diff --git a/signoz/frontend/src/components/Graph/Plugin/Legend.ts b/signoz/frontend/src/components/Graph/Plugin/Legend.ts new file mode 100644 index 0000000..809e0d1 --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/Legend.ts @@ -0,0 +1,114 @@ +import { Chart, ChartType, Plugin } from 'chart.js'; 
+import { Events } from 'constants/events'; +import { colors } from 'lib/getRandomColor'; +import { get } from 'lodash-es'; +import { eventEmitter } from 'utils/getEventEmitter'; + +const getOrCreateLegendList = ( + chart: Chart, + id: string, + isLonger: boolean, +): HTMLUListElement => { + const legendContainer = document.getElementById(id); + let listContainer = legendContainer?.querySelector('ul'); + + if (!listContainer) { + listContainer = document.createElement('ul'); + listContainer.style.display = 'flex'; + // listContainer.style.flexDirection = isLonger ? 'column' : 'row'; + listContainer.style.margin = '0'; + listContainer.style.padding = '0'; + listContainer.style.overflowY = 'scroll'; + listContainer.style.justifyContent = isLonger ? 'start' : 'center'; + listContainer.style.alignItems = isLonger ? 'start' : 'center'; + listContainer.style.minHeight = '2rem'; + listContainer.style.height = '100%'; + listContainer.style.flexWrap = 'wrap'; + listContainer.style.justifyContent = 'center'; + listContainer.style.fontSize = '0.75rem'; + legendContainer?.appendChild(listContainer); + } + + return listContainer; +}; + +export const legend = (id: string, isLonger: boolean): Plugin => ({ + id: 'htmlLegend', + afterUpdate(chart): void { + const ul = getOrCreateLegendList(chart, id || 'legend', isLonger); + + // Remove old legend items + while (ul.firstChild) { + ul.firstChild.remove(); + } + + // Reuse the built-in legendItems generator + const items = get(chart, [ + 'options', + 'plugins', + 'legend', + 'labels', + 'generateLabels', + ]) + ? 
get(chart, ['options', 'plugins', 'legend', 'labels', 'generateLabels'])( + chart, + ) + : null; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + items?.forEach((item: Record, index: number) => { + const li = document.createElement('li'); + li.style.alignItems = 'center'; + li.style.cursor = 'pointer'; + li.style.display = 'flex'; + li.style.marginLeft = '10px'; + // li.style.marginTop = '5px'; + + li.onclick = (): void => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const { type } = chart.config; + if (type === 'pie' || type === 'doughnut') { + // Pie and doughnut charts only have a single dataset and visibility is per item + chart.toggleDataVisibility(index); + } else { + chart.setDatasetVisibility( + item.datasetIndex, + !chart.isDatasetVisible(item.datasetIndex), + ); + eventEmitter.emit(Events.UPDATE_GRAPH_MANAGER_TABLE, { + name: id, + index: item.datasetIndex, + }); + } + chart.update(); + }; + + // Color box + const boxSpan = document.createElement('span'); + boxSpan.style.background = `${item.strokeStyle}` || `${colors[0]}`; + boxSpan.style.borderColor = `${item?.strokeStyle}`; + boxSpan.style.borderWidth = `${item.lineWidth}px`; + boxSpan.style.display = 'inline-block'; + boxSpan.style.minHeight = '0.75rem'; + boxSpan.style.marginRight = '0.5rem'; + boxSpan.style.minWidth = '0.75rem'; + boxSpan.style.borderRadius = '50%'; + + if (item.text) { + // Text + const textContainer = document.createElement('span'); + textContainer.style.margin = '0'; + textContainer.style.padding = '0'; + textContainer.style.textDecoration = item.hidden ? 
'line-through' : ''; + + const text = document.createTextNode(item.text); + textContainer.appendChild(text); + + li.appendChild(boxSpan); + li.appendChild(textContainer); + ul.appendChild(li); + } + }); + }, +}); diff --git a/signoz/frontend/src/components/Graph/Plugin/Tooltip.ts b/signoz/frontend/src/components/Graph/Plugin/Tooltip.ts new file mode 100644 index 0000000..0a37694 --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/Tooltip.ts @@ -0,0 +1,46 @@ +import { + ActiveElement, + ChartTypeRegistry, + Point, + TooltipModel, + TooltipXAlignment, + TooltipYAlignment, +} from 'chart.js'; + +export function TooltipPosition( + this: TooltipModel, + _: readonly ActiveElement[], + eventPosition: Point, +): ITooltipPosition { + const { + chartArea: { width }, + scales: { x, y }, + } = this.chart; + + const valueForPixelOnX = Number(x.getValueForPixel(eventPosition.x)); + const valueForPixelonY = Number(y.getValueForPixel(eventPosition.y)); + + const rightmostWidth = this.width + x.getPixelForValue(valueForPixelOnX) + 20; + + if (rightmostWidth > width) { + return { + x: x.getPixelForValue(valueForPixelOnX) - 20, + y: y.getPixelForValue(valueForPixelonY) + 10, + xAlign: 'right', + yAlign: 'top', + }; + } + return { + x: x.getPixelForValue(valueForPixelOnX) + 20, + y: y.getPixelForValue(valueForPixelonY) + 10, + xAlign: 'left', + yAlign: 'top', + }; +} + +interface ITooltipPosition { + x: number; + y: number; + xAlign: TooltipXAlignment; + yAlign: TooltipYAlignment; +} diff --git a/signoz/frontend/src/components/Graph/Plugin/index.ts b/signoz/frontend/src/components/Graph/Plugin/index.ts new file mode 100644 index 0000000..6adb3cc --- /dev/null +++ b/signoz/frontend/src/components/Graph/Plugin/index.ts @@ -0,0 +1 @@ +export * from './Legend'; diff --git a/signoz/frontend/src/components/Graph/Plugin/utils.ts b/signoz/frontend/src/components/Graph/Plugin/utils.ts new file mode 100644 index 0000000..4260e9e --- /dev/null +++ 
b/signoz/frontend/src/components/Graph/Plugin/utils.ts @@ -0,0 +1,20 @@ +import { ChartEvent } from 'chart.js'; + +export type ChartEventHandler = (ev: ChartEvent | MouseEvent) => void; + +export function mergeDefaultOptions>( + options: T, + defaultOptions: Required, +): Required { + const sanitizedOptions = { ...options }; + Object.keys(options).forEach((key) => { + if (sanitizedOptions[key as keyof T] === undefined) { + delete sanitizedOptions[key as keyof T]; + } + }); + + return { + ...defaultOptions, + ...sanitizedOptions, + }; +} diff --git a/signoz/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts b/signoz/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts new file mode 100644 index 0000000..b26a243 --- /dev/null +++ b/signoz/frontend/src/components/Graph/__tests__/xAxisConfig.test.ts @@ -0,0 +1,74 @@ +import dayjs from 'dayjs'; + +import { convertTimeRange, TIME_UNITS } from '../xAxisConfig'; + +describe('xAxisConfig for Chart', () => { + describe('convertTimeRange', () => { + it('should return relevant time units for given range', () => { + { + const start = dayjs(); + const end = start.add(10, 'millisecond'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.millisecond, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'second'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.second, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'minute'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.minute, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'hour'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.hour, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'day'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.day, + ); + } + { + const start = dayjs(); + 
const end = start.add(10, 'week'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.week, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'month'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.month, + ); + } + { + const start = dayjs(); + const end = start.add(10, 'year'); + + expect(convertTimeRange(start.valueOf(), end.valueOf()).unitName).toEqual( + TIME_UNITS.year, + ); + } + }); + }); +}); diff --git a/signoz/frontend/src/components/Graph/hasData.ts b/signoz/frontend/src/components/Graph/hasData.ts new file mode 100644 index 0000000..5ba968b --- /dev/null +++ b/signoz/frontend/src/components/Graph/hasData.ts @@ -0,0 +1,19 @@ +/* eslint-disable no-restricted-syntax */ +import { ChartData } from 'chart.js'; + +export const hasData = (data: ChartData): boolean => { + const { datasets = [] } = data; + let hasData = false; + try { + for (const dataset of datasets) { + if (dataset.data.length > 0) { + hasData = true; + break; + } + } + } catch (error) { + console.error(error); + } + + return hasData; +}; diff --git a/signoz/frontend/src/components/Graph/helpers.ts b/signoz/frontend/src/components/Graph/helpers.ts new file mode 100644 index 0000000..c097dcb --- /dev/null +++ b/signoz/frontend/src/components/Graph/helpers.ts @@ -0,0 +1,8 @@ +import { themeColors } from 'constants/theme'; + +export const getAxisLabelColor = (currentTheme: string): string => { + if (currentTheme === 'light') { + return themeColors.black; + } + return themeColors.whiteCream; +}; diff --git a/signoz/frontend/src/components/Graph/index.tsx b/signoz/frontend/src/components/Graph/index.tsx new file mode 100644 index 0000000..0065f6b --- /dev/null +++ b/signoz/frontend/src/components/Graph/index.tsx @@ -0,0 +1,209 @@ +import { + BarController, + BarElement, + CategoryScale, + Chart, + Decimation, + Filler, + Legend, + LinearScale, + LineController, + LineElement, + PointElement, + 
SubTitle, + TimeScale, + TimeSeriesScale, + Title, + Tooltip, +} from 'chart.js'; +import annotationPlugin from 'chartjs-plugin-annotation'; +import { generateGridTitle } from 'container/GridPanelSwitch/utils'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import isEqual from 'lodash-es/isEqual'; +import { + forwardRef, + memo, + useCallback, + useEffect, + useImperativeHandle, + useMemo, + useRef, +} from 'react'; + +import { hasData } from './hasData'; +import { legend } from './Plugin'; +import { createDragSelectPlugin } from './Plugin/DragSelect'; +import { emptyGraph } from './Plugin/EmptyGraph'; +import { createIntersectionCursorPlugin } from './Plugin/IntersectionCursor'; +import { TooltipPosition as TooltipPositionHandler } from './Plugin/Tooltip'; +import { LegendsContainer } from './styles'; +import { CustomChartOptions, GraphProps, ToggleGraphProps } from './types'; +import { getGraphOptions, toggleGraph } from './utils'; +import { useXAxisTimeUnit } from './xAxisConfig'; + +Chart.register( + LineElement, + PointElement, + LineController, + CategoryScale, + LinearScale, + TimeScale, + TimeSeriesScale, + Decimation, + Filler, + Legend, + Title, + Tooltip, + SubTitle, + BarController, + BarElement, + annotationPlugin, +); + +Tooltip.positioners.custom = TooltipPositionHandler; + +const Graph = forwardRef( + ( + { + animate = true, + data, + type, + title, + isStacked, + onClickHandler, + name, + yAxisUnit = 'short', + forceReRender, + staticLine, + containerHeight, + onDragSelect, + dragSelectColor, + }, + ref, + ): JSX.Element => { + const nearestDatasetIndex = useRef(null); + const chartRef = useRef(null); + const isDarkMode = useIsDarkMode(); + const gridTitle = useMemo(() => generateGridTitle(title), [title]); + + const currentTheme = isDarkMode ? 
'dark' : 'light'; + const xAxisTimeUnit = useXAxisTimeUnit(data); // Computes the relevant time unit for x axis by analyzing the time stamp data + + const lineChartRef = useRef(); + + useImperativeHandle( + ref, + (): ToggleGraphProps => ({ + toggleGraph(graphIndex: number, isVisible: boolean): void { + toggleGraph(graphIndex, isVisible, lineChartRef); + }, + }), + ); + + const getGridColor = useCallback(() => { + if (currentTheme === undefined) { + return 'rgba(231,233,237,0.1)'; + } + + if (currentTheme === 'dark') { + return 'rgba(231,233,237,0.1)'; + } + + return 'rgba(231,233,237,0.8)'; + }, [currentTheme]); + + const buildChart = useCallback(() => { + if (lineChartRef.current !== undefined) { + lineChartRef.current.destroy(); + } + + if (chartRef.current !== null) { + const options: CustomChartOptions = getGraphOptions( + animate, + staticLine, + gridTitle, + nearestDatasetIndex, + yAxisUnit, + onDragSelect, + dragSelectColor, + currentTheme, + getGridColor, + xAxisTimeUnit, + isStacked, + onClickHandler, + data, + ); + + const chartHasData = hasData(data); + const chartPlugins = []; + + if (chartHasData) { + chartPlugins.push(createIntersectionCursorPlugin()); + chartPlugins.push(createDragSelectPlugin()); + } else { + chartPlugins.push(emptyGraph); + } + + chartPlugins.push(legend(name, data.datasets.length > 3)); + + lineChartRef.current = new Chart(chartRef.current, { + type, + data, + options, + plugins: chartPlugins, + }); + } + }, [ + animate, + staticLine, + gridTitle, + yAxisUnit, + onDragSelect, + dragSelectColor, + currentTheme, + getGridColor, + xAxisTimeUnit, + isStacked, + onClickHandler, + data, + name, + type, + ]); + + useEffect(() => { + buildChart(); + }, [buildChart, forceReRender]); + + return ( +
+ + +
+ ); + }, +); + +declare module 'chart.js' { + interface TooltipPositionerMap { + custom: TooltipPositionerFunction; + } +} + +Graph.defaultProps = { + animate: undefined, + title: undefined, + isStacked: undefined, + onClickHandler: undefined, + yAxisUnit: undefined, + forceReRender: undefined, + staticLine: undefined, + containerHeight: '90%', + onDragSelect: undefined, + dragSelectColor: undefined, +}; + +Graph.displayName = 'Graph'; + +export default memo(Graph, (prevProps, nextProps) => + isEqual(prevProps.data, nextProps.data), +); diff --git a/signoz/frontend/src/components/Graph/styles.ts b/signoz/frontend/src/components/Graph/styles.ts new file mode 100644 index 0000000..371aa9d --- /dev/null +++ b/signoz/frontend/src/components/Graph/styles.ts @@ -0,0 +1,28 @@ +import { themeColors } from 'constants/theme'; +import styled from 'styled-components'; + +export const LegendsContainer = styled.div` + height: 10%; + + * { + ::-webkit-scrollbar { + width: 0.3rem; + } + ::-webkit-scrollbar:horizontal { + height: 0.3rem; + } + ::-webkit-scrollbar-track { + background: transparent; + } + ::-webkit-scrollbar-thumb { + background: ${themeColors.royalGrey}; + border-radius: 0.625rem; + } + ::-webkit-scrollbar-thumb:hover { + background: ${themeColors.matterhornGrey}; + } + ::-webkit-scrollbar-corner { + background: transparent; + } + } +`; diff --git a/signoz/frontend/src/components/Graph/types.ts b/signoz/frontend/src/components/Graph/types.ts new file mode 100644 index 0000000..4dd1d5b --- /dev/null +++ b/signoz/frontend/src/components/Graph/types.ts @@ -0,0 +1,78 @@ +import { + ActiveElement, + Chart, + ChartData, + ChartEvent, + ChartOptions, + ChartType, + TimeUnit, +} from 'chart.js'; +import { ForwardedRef, ReactNode } from 'react'; + +import { + dragSelectPluginId, + DragSelectPluginOptions, +} from './Plugin/DragSelect'; +import { + intersectionCursorPluginId, + IntersectionCursorPluginOptions, +} from './Plugin/IntersectionCursor'; + +export interface 
StaticLineProps { + yMin: number | undefined; + yMax: number | undefined; + borderColor: string; + borderWidth: number; + lineText: string; + textColor: string; +} + +export type GraphOnClickHandler = ( + event: ChartEvent, + elements: ActiveElement[], + chart: Chart, + data: ChartData, +) => void; + +export type ToggleGraphProps = { + toggleGraph(graphIndex: number, isVisible: boolean, reference?: string): void; +}; + +export type CustomChartOptions = ChartOptions & { + plugins: { + [dragSelectPluginId]: DragSelectPluginOptions | false; + [intersectionCursorPluginId]: IntersectionCursorPluginOptions | false; + }; +}; + +export interface GraphProps { + animate?: boolean; + type: ChartType; + data: Chart['data']; + title?: ReactNode; + isStacked?: boolean; + onClickHandler?: GraphOnClickHandler; + name: string; + yAxisUnit?: string; + forceReRender?: boolean | null | number; + staticLine?: StaticLineProps | undefined; + containerHeight?: string | number; + onDragSelect?: (start: number, end: number) => void; + dragSelectColor?: string; + ref?: ForwardedRef; +} + +export interface IAxisTimeUintConfig { + unitName: TimeUnit; + multiplier: number; +} + +export interface IAxisTimeConfig { + unitName: TimeUnit; + stepSize: number; +} + +export interface ITimeRange { + minTime: number | null; + maxTime: number | null; +} diff --git a/signoz/frontend/src/components/Graph/utils.ts b/signoz/frontend/src/components/Graph/utils.ts new file mode 100644 index 0000000..db30b6a --- /dev/null +++ b/signoz/frontend/src/components/Graph/utils.ts @@ -0,0 +1,223 @@ +import { Chart, ChartConfiguration, ChartData, Color } from 'chart.js'; +import * as chartjsAdapter from 'chartjs-adapter-date-fns'; +import dayjs from 'dayjs'; +import { MutableRefObject } from 'react'; + +import { getAxisLabelColor } from './helpers'; +import { + createDragSelectPluginOptions, + dragSelectPluginId, +} from './Plugin/DragSelect'; +import { + createIntersectionCursorPluginOptions, + 
intersectionCursorPluginId, +} from './Plugin/IntersectionCursor'; +import { + CustomChartOptions, + GraphOnClickHandler, + IAxisTimeConfig, + StaticLineProps, +} from './types'; +import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig'; + +export const toggleGraph = ( + graphIndex: number, + isVisible: boolean, + lineChartRef: MutableRefObject, +): void => { + if (lineChartRef && lineChartRef.current) { + const { type } = lineChartRef.current?.config as ChartConfiguration; + if (type === 'pie' || type === 'doughnut') { + lineChartRef.current?.toggleDataVisibility(graphIndex); + } else { + lineChartRef.current?.setDatasetVisibility(graphIndex, isVisible); + } + lineChartRef.current?.update(); + } +}; + +export const getGraphOptions = ( + animate: boolean, + staticLine: StaticLineProps | undefined, + title: string | undefined, + nearestDatasetIndex: MutableRefObject, + yAxisUnit: string, + onDragSelect: ((start: number, end: number) => void) | undefined, + dragSelectColor: string | undefined, + currentTheme: 'dark' | 'light', + getGridColor: () => 'rgba(231,233,237,0.1)' | 'rgba(231,233,237,0.8)', + xAxisTimeUnit: IAxisTimeConfig, + isStacked: boolean | undefined, + onClickHandler: GraphOnClickHandler | undefined, + data: ChartData, + // eslint-disable-next-line sonarjs/cognitive-complexity +): CustomChartOptions => ({ + animation: { + duration: animate ? 200 : 0, + }, + responsive: true, + maintainAspectRatio: false, + interaction: { + mode: 'index', + intersect: false, + }, + plugins: { + annotation: staticLine + ? 
{ + annotations: [ + { + type: 'line', + yMin: staticLine.yMin, + yMax: staticLine.yMax, + borderColor: staticLine.borderColor, + borderWidth: staticLine.borderWidth, + label: { + content: staticLine.lineText, + enabled: true, + font: { + size: 10, + }, + borderWidth: 0, + position: 'start', + backgroundColor: 'transparent', + color: staticLine.textColor, + }, + }, + ], + } + : undefined, + title: { + display: title !== undefined, + text: title, + }, + legend: { + display: false, + }, + tooltip: { + callbacks: { + title(context): string | string[] { + const date = dayjs(context[0].parsed.x); + return date.format('MMM DD, YYYY, HH:mm:ss'); + }, + label(context): string | string[] { + let label = context.dataset.label || ''; + + if (label) { + label += ': '; + } + if (context.parsed.y !== null) { + label += getToolTipValue(context.parsed.y.toString(), yAxisUnit); + } + + return label; + }, + labelTextColor(labelData): Color { + if (labelData.datasetIndex === nearestDatasetIndex.current) { + return 'rgba(255, 255, 255, 1)'; + } + + return 'rgba(255, 255, 255, 0.75)'; + }, + }, + position: 'custom', + itemSort(item1, item2): number { + return item2.parsed.y - item1.parsed.y; + }, + }, + [dragSelectPluginId]: createDragSelectPluginOptions( + !!onDragSelect, + onDragSelect, + dragSelectColor, + ), + [intersectionCursorPluginId]: createIntersectionCursorPluginOptions( + !!onDragSelect, + currentTheme === 'dark' ? 
'white' : 'black', + ), + }, + layout: { + padding: 0, + }, + scales: { + x: { + grid: { + display: true, + color: getGridColor(), + drawTicks: true, + }, + adapters: { + date: chartjsAdapter, + }, + time: { + unit: xAxisTimeUnit?.unitName || 'minute', + stepSize: xAxisTimeUnit?.stepSize || 1, + displayFormats: { + millisecond: 'HH:mm:ss', + second: 'HH:mm:ss', + minute: 'HH:mm', + hour: 'MM/dd HH:mm', + day: 'MM/dd', + week: 'MM/dd', + month: 'yy-MM', + year: 'yy', + }, + }, + type: 'time', + ticks: { color: getAxisLabelColor(currentTheme) }, + }, + y: { + display: true, + grid: { + display: true, + color: getGridColor(), + }, + ticks: { + color: getAxisLabelColor(currentTheme), + // Include a dollar sign in the ticks + callback(value): string { + return getYAxisFormattedValue(value.toString(), yAxisUnit); + }, + }, + }, + stacked: { + display: isStacked === undefined ? false : 'auto', + }, + }, + elements: { + line: { + tension: 0, + cubicInterpolationMode: 'monotone', + }, + point: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + hoverBackgroundColor: (ctx: any): string => { + if (ctx?.element?.options?.borderColor) { + return ctx.element.options.borderColor; + } + return 'rgba(0,0,0,0.1)'; + }, + hoverRadius: 5, + }, + }, + onClick: (event, element, chart): void => { + if (onClickHandler) { + onClickHandler(event, element, chart, data); + } + }, + onHover: (event, _, chart): void => { + if (event.native) { + const interactions = chart.getElementsAtEventForMode( + event.native, + 'nearest', + { + intersect: false, + }, + true, + ); + + if (interactions[0]) { + // eslint-disable-next-line no-param-reassign + nearestDatasetIndex.current = interactions[0].datasetIndex; + } + } + }, +}); diff --git a/signoz/frontend/src/components/Graph/xAxisConfig.ts b/signoz/frontend/src/components/Graph/xAxisConfig.ts new file mode 100644 index 0000000..3fa0b00 --- /dev/null +++ b/signoz/frontend/src/components/Graph/xAxisConfig.ts @@ -0,0 +1,140 @@ +import { 
Chart, TimeUnit } from 'chart.js'; +import { useMemo } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +import { IAxisTimeConfig, IAxisTimeUintConfig, ITimeRange } from './types'; + +export const TIME_UNITS: Record = { + millisecond: 'millisecond', + second: 'second', + minute: 'minute', + hour: 'hour', + day: 'day', + week: 'week', + month: 'month', + year: 'year', + quarter: 'quarter', +}; + +const TIME_UNITS_CONFIG: IAxisTimeUintConfig[] = [ + { + unitName: TIME_UNITS.millisecond, + multiplier: 1, + }, + { + unitName: TIME_UNITS.second, + multiplier: 1 / 1e3, + }, + { + unitName: TIME_UNITS.minute, + multiplier: 1 / (1e3 * 60), + }, + { + unitName: TIME_UNITS.hour, + multiplier: 1 / (1e3 * 60 * 60), + }, + { + unitName: TIME_UNITS.day, + multiplier: 1 / (1e3 * 60 * 60 * 24), + }, + { + unitName: TIME_UNITS.week, + multiplier: 1 / (1e3 * 60 * 60 * 24 * 7), + }, + { + unitName: TIME_UNITS.month, + multiplier: 1 / (1e3 * 60 * 60 * 24 * 30), + }, + { + unitName: TIME_UNITS.year, + multiplier: 1 / (1e3 * 60 * 60 * 24 * 365), + }, +]; + +/** + * Finds the relevant time unit based on the input time stamps (in ms) + */ +export const convertTimeRange = ( + start: number, + end: number, +): IAxisTimeConfig => { + const MIN_INTERVALS = 6; + const range = end - start; + let relevantTimeUnit = TIME_UNITS_CONFIG[1]; + let stepSize = 1; + try { + for (let idx = TIME_UNITS_CONFIG.length - 1; idx >= 0; idx -= 1) { + const timeUnit = TIME_UNITS_CONFIG[idx]; + const units = range * timeUnit.multiplier; + const steps = units / MIN_INTERVALS; + if (steps >= 1) { + relevantTimeUnit = timeUnit; + stepSize = steps; + break; + } + } + } catch (error) { + console.error(error); + } + + return { + unitName: relevantTimeUnit.unitName, + stepSize: Math.floor(stepSize) || 1, + }; +}; + +/** + * Accepts Chart.js data's data-structure and returns the relevant time unit for the axis 
based on the range of the data. + */ +export const useXAxisTimeUnit = (data: Chart['data']): IAxisTimeConfig => { + // Local time is the time range inferred from the input chart data. + let localTime: ITimeRange | null; + try { + let minTime = Number.POSITIVE_INFINITY; + let maxTime = Number.NEGATIVE_INFINITY; + data?.labels?.forEach((timeStamp: unknown): void => { + const getTimeStamp = (time: Date | number): Date | number | string => { + if (time instanceof Date) { + return Date.parse(time.toString()); + } + + return time; + }; + const time = getTimeStamp(timeStamp as Date | number); + + minTime = Math.min(parseInt(time.toString(), 10), minTime); + maxTime = Math.max(parseInt(time.toString(), 10), maxTime); + }); + + localTime = { + minTime: minTime === Number.POSITIVE_INFINITY ? null : minTime, + maxTime: maxTime === Number.NEGATIVE_INFINITY ? null : maxTime, + }; + } catch (error) { + localTime = null; + console.error(error); + } + + // Global time is the time selected from the global time selector menu. 
+ const globalTime = useSelector( + (state) => state.globalTime, + ); + + // Use local time if valid else use the global time range + const { maxTime, minTime } = useMemo(() => { + if (localTime && localTime.maxTime && localTime.minTime) { + return { + minTime: localTime.minTime, + maxTime: localTime.maxTime, + }; + } + return { + minTime: globalTime.minTime / 1e6, + maxTime: globalTime.maxTime / 1e6, + }; + }, [globalTime, localTime]); + + return convertTimeRange(minTime, maxTime); +}; diff --git a/signoz/frontend/src/components/Graph/yAxisConfig.ts b/signoz/frontend/src/components/Graph/yAxisConfig.ts new file mode 100644 index 0000000..a5eca12 --- /dev/null +++ b/signoz/frontend/src/components/Graph/yAxisConfig.ts @@ -0,0 +1,58 @@ +import { formattedValueToString, getValueFormat } from '@grafana/data'; + +export const getYAxisFormattedValue = ( + value: string, + format: string, +): string => { + let decimalPrecision: number | undefined; + const parsedValue = getValueFormat(format)( + parseFloat(value), + undefined, + undefined, + undefined, + ); + try { + const decimalSplitted = parsedValue.text.split('.'); + if (decimalSplitted.length === 1) { + decimalPrecision = 0; + } else { + const decimalDigits = decimalSplitted[1].split(''); + decimalPrecision = decimalDigits.length; + let nonZeroCtr = 0; + for (let idx = 0; idx < decimalDigits.length; idx += 1) { + if (decimalDigits[idx] !== '0') { + nonZeroCtr += 1; + if (nonZeroCtr >= 2) { + decimalPrecision = idx + 1; + } + } else if (nonZeroCtr) { + decimalPrecision = idx; + break; + } + } + } + + return formattedValueToString( + getValueFormat(format)( + parseFloat(value), + decimalPrecision, + undefined, + undefined, + ), + ); + } catch (error) { + console.error(error); + } + return `${parseFloat(value)}`; +}; + +export const getToolTipValue = (value: string, format?: string): string => { + try { + return formattedValueToString( + getValueFormat(format)(parseFloat(value), undefined, undefined, undefined), + ); + } 
catch (error) { + console.error(error); + } + return `${value}`; +}; diff --git a/signoz/frontend/src/components/Input/index.tsx b/signoz/frontend/src/components/Input/index.tsx new file mode 100644 index 0000000..18a84f6 --- /dev/null +++ b/signoz/frontend/src/components/Input/index.tsx @@ -0,0 +1,71 @@ +import { Form, Input, InputProps, InputRef } from 'antd'; +import { + ChangeEventHandler, + FocusEventHandler, + KeyboardEventHandler, + LegacyRef, + ReactNode, + Ref, +} from 'react'; + +function InputComponent({ + value, + type = 'text', + onChangeHandler, + placeholder, + ref, + size = 'small', + onBlurHandler, + onPressEnterHandler, + label, + labelOnTop, + addonBefore, + ...props +}: InputComponentProps): JSX.Element { + return ( + + } + size={size} + addonBefore={addonBefore} + onBlur={onBlurHandler} + onPressEnter={onPressEnterHandler} + // eslint-disable-next-line react/jsx-props-no-spreading + {...props} + /> + + ); +} + +interface InputComponentProps extends InputProps { + value: InputProps['value']; + type?: InputProps['type']; + onChangeHandler?: ChangeEventHandler; + placeholder?: InputProps['placeholder']; + ref?: LegacyRef; + size?: InputProps['size']; + onBlurHandler?: FocusEventHandler; + onPressEnterHandler?: KeyboardEventHandler; + label?: string; + labelOnTop?: boolean; + addonBefore?: ReactNode; +} + +InputComponent.defaultProps = { + type: undefined, + onChangeHandler: undefined, + placeholder: undefined, + ref: undefined, + size: undefined, + onBlurHandler: undefined, + onPressEnterHandler: undefined, + label: undefined, + labelOnTop: undefined, + addonBefore: undefined, +}; + +export default InputComponent; diff --git a/signoz/frontend/src/components/Loadable/Loadable.test.tsx b/signoz/frontend/src/components/Loadable/Loadable.test.tsx new file mode 100644 index 0000000..2d2a217 --- /dev/null +++ b/signoz/frontend/src/components/Loadable/Loadable.test.tsx @@ -0,0 +1,49 @@ +import { + render, + screen, + waitForElementToBeRemoved, +} from 
'@testing-library/react'; +import React, { ComponentType, Suspense } from 'react'; + +import Loadable from './index'; + +// Sample component to be loaded lazily +function SampleComponent(): JSX.Element { + return
Sample Component
; +} + +const loadSampleComponent = (): Promise<{ + default: ComponentType; +}> => + new Promise<{ default: ComponentType }>((resolve) => { + setTimeout(() => { + resolve({ default: SampleComponent }); + }, 500); + }); + +describe('Loadable', () => { + it('should render the lazily loaded component', async () => { + const LoadableSampleComponent = Loadable(loadSampleComponent); + + const { container } = render( + Loading...}> + + , + ); + + expect(screen.getByText('Loading...')).toBeInTheDocument(); + await waitForElementToBeRemoved(() => screen.queryByText('Loading...')); + + expect(container.querySelector('div')).toHaveTextContent('Sample Component'); + }); + + it('should call lazy with the provided import path', () => { + const reactLazySpy = jest.spyOn(React, 'lazy'); + Loadable(loadSampleComponent); + + expect(reactLazySpy).toHaveBeenCalledTimes(1); + expect(reactLazySpy).toHaveBeenCalledWith(expect.any(Function)); + + reactLazySpy.mockRestore(); + }); +}); diff --git a/signoz/frontend/src/components/Loadable/index.tsx b/signoz/frontend/src/components/Loadable/index.tsx new file mode 100644 index 0000000..e562b56 --- /dev/null +++ b/signoz/frontend/src/components/Loadable/index.tsx @@ -0,0 +1,16 @@ +import { ComponentType, lazy, LazyExoticComponent } from 'react'; +import { lazyRetry } from 'utils/lazyWithRetries'; + +function Loadable(importPath: { + (): LoadableProps; +}): LazyExoticComponent { + return lazy(() => lazyRetry(() => importPath())); +} + +type LazyComponent = ComponentType>; + +type LoadableProps = Promise<{ + default: LazyComponent; +}>; + +export default Loadable; diff --git a/signoz/frontend/src/components/LogDetail/LogDetail.interfaces.ts b/signoz/frontend/src/components/LogDetail/LogDetail.interfaces.ts new file mode 100644 index 0000000..399e1df --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/LogDetail.interfaces.ts @@ -0,0 +1,14 @@ +import { DrawerProps } from 'antd'; +import { AddToQueryHOCProps } from 
'components/Logs/AddToQueryHOC'; +import { ActionItemProps } from 'container/LogDetailedView/ActionItem'; +import { ILog } from 'types/api/logs/log'; + +import { VIEWS } from './constants'; + +export type LogDetailProps = { + log: ILog | null; + selectedTab: VIEWS; + isListViewPanel?: boolean; +} & Pick & + Partial> & + Pick; diff --git a/signoz/frontend/src/components/LogDetail/LogDetails.styles.scss b/signoz/frontend/src/components/LogDetail/LogDetails.styles.scss new file mode 100644 index 0000000..5cd014b --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/LogDetails.styles.scss @@ -0,0 +1,232 @@ +.log-detail-drawer { + border-left: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + box-shadow: -4px 10px 16px 2px rgba(0, 0, 0, 0.2); + + .ant-drawer-header { + padding: 8px 16px; + border-bottom: none; + + align-items: stretch; + + border-bottom: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + } + + .ant-drawer-close { + margin-inline-end: 0px; + } + + .ant-drawer-body { + display: flex; + flex-direction: column; + padding: 16px; + } + + .title { + color: var(--text-vanilla-400); + font-family: Inter; + font-size: var(--font-size-sm); + font-style: normal; + font-weight: var(--font-weight-normal); + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + } + + .radio-button { + display: flex; + align-items: center; + justify-content: center; + padding-top: var(--padding-1); + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + } + + .log-detail-drawer__log { + width: 100%; + display: flex; + align-items: center; + gap: 4px; + position: relative; + + .log-body { + font-family: 'SF Mono'; + font-family: 'Geist Mono'; + + font-size: var(--font-size-sm); + font-weight: var(--font-weight-normal); + line-height: 18px; + letter-spacing: -0.07px; + white-space: nowrap; + text-overflow: ellipsis; + overflow: hidden; + color: var(--text-vanilla-400); + 
opacity: 0.6; + } + + .log-type-indicator { + height: 24px; + border: 2px solid var(--bg-slate-400); + border-radius: 5px; + margin-left: 0; + + &.INFO { + border-color: #1d212d; + } + + &.WARNING { + border-color: #ffcd56; + } + + &.ERROR { + border-color: #e5484d; + } + } + + .log-overflow-shadow { + background: linear-gradient(270deg, #121317 10.4%, rgba(18, 19, 23, 0) 100%); + + width: 196px; + position: absolute; + right: 0; + } + } + + .tabs-and-search { + display: flex; + justify-content: space-between; + align-items: center; + margin: 16px 0; + + .action-btn { + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + box-shadow: 0px 0px 8px 0px rgba(0, 0, 0, 0.1); + display: flex; + align-items: center; + justify-content: center; + } + + .json-action-btn { + display: flex; + gap: 8px; + } + } + + .views-tabs { + color: var(--text-vanilla-400); + + .view-title { + display: flex; + gap: var(--margin-2); + align-items: center; + justify-content: center; + font-size: var(--font-size-xs); + font-style: normal; + font-weight: var(--font-weight-normal); + } + + .tab { + border: 1px solid var(--bg-slate-400); + width: 114px; + } + + .tab::before { + background: var(--bg-slate-400); + } + + .selected_view { + background: var(--bg-slate-300); + color: var(--text-vanilla-100); + border: 1px solid var(--bg-slate-400); + } + + .selected_view::before { + background: var(--bg-slate-400); + } + } + + .search-input { + margin-top: var(--margin-2); + border: 1px solid var(--bg-slate-400); + height: 46px; + padding: var(--padding-1) var(--padding-2); + box-shadow: none; + border-radius: 0; + } + + .ant-drawer-close { + padding: 0px; + } +} + +.lightMode { + .ant-drawer-header { + border-bottom: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + } + + .log-detail-drawer { + .title { + color: var(--text-ink-300); + } + + .log-detail-drawer__log { + .log-overflow-shadow { + background: linear-gradient( + 270deg, + 
var(--bg-vanilla-100) 10.4%, + rgba(255, 255, 255, 0) 100% + ); + } + + .log-type-indicator { + border: 2px solid var(--bg-vanilla-400); + } + + .ant-typography { + color: var(--text-ink-300); + background: transparent; + } + } + + .radio-button { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + + .views-tabs { + .tab { + background: var(--bg-vanilla-100); + } + + .selected_view { + background: var(--bg-vanilla-300); + border: 1px solid var(--bg-slate-300); + color: var(--text-ink-400); + } + + .selected_view::before { + background: var(--bg-vanilla-300); + border-left: 1px solid var(--bg-slate-300); + } + } + + .tabs-and-search { + .action-btn { + border: 1px solid var(--bg-vanilla-400); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + } + + .search-input { + border: 1px solid var(--bg-vanilla-200); + background: var(--bg-vanilla-100); + color: var(--text-ink-300); + } + } +} diff --git a/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss b/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss new file mode 100644 index 0000000..2a6822d --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.styles.scss @@ -0,0 +1,13 @@ +.query-builder-search-wrapper { + margin-top: 10px; + border: 1px solid var(--bg-slate-400); + border-bottom: none; + + .ant-select-selector { + border: none !important; + + input { + font-size: 12px; + } + } +} diff --git a/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx b/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx new file mode 100644 index 0000000..72d18b7 --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/QueryBuilderSearchWrapper.tsx @@ -0,0 +1,77 @@ +import './QueryBuilderSearchWrapper.styles.scss'; + +import useInitialQuery from 'container/LogsExplorerContext/useInitialQuery'; +import QueryBuilderSearch from 
'container/QueryBuilder/filters/QueryBuilderSearch'; +import { Dispatch, SetStateAction, useEffect } from 'react'; +import { ILog } from 'types/api/logs/log'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +function QueryBuilderSearchWrapper({ + log, + filters, + contextQuery, + isEdit, + suffixIcon, + setFilters, + setContextQuery, +}: QueryBuilderSearchWraperProps): JSX.Element { + const initialContextQuery = useInitialQuery(log); + + useEffect(() => { + setContextQuery(initialContextQuery); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + const handleSearch = (tagFilters: TagFilter): void => { + const tagFiltersLength = tagFilters.items.length; + + if ( + (!tagFiltersLength && (!filters || !filters.items.length)) || + tagFiltersLength === filters?.items.length || + !contextQuery + ) + return; + + const nextQuery: Query = { + ...contextQuery, + builder: { + ...contextQuery.builder, + queryData: contextQuery.builder.queryData.map((item) => ({ + ...item, + filters: tagFilters, + })), + }, + }; + + setFilters({ ...tagFilters }); + setContextQuery({ ...nextQuery }); + }; + + // eslint-disable-next-line react/jsx-no-useless-fragment + if (!contextQuery || !isEdit) return <>; + + return ( + + ); +} + +interface QueryBuilderSearchWraperProps { + log: ILog; + isEdit: boolean; + contextQuery: Query | undefined; + setContextQuery: Dispatch>; + filters: TagFilter | null; + setFilters: Dispatch>; + suffixIcon?: React.ReactNode; +} + +QueryBuilderSearchWrapper.defaultProps = { + suffixIcon: undefined, +}; + +export default QueryBuilderSearchWrapper; diff --git a/signoz/frontend/src/components/LogDetail/constants.ts b/signoz/frontend/src/components/LogDetail/constants.ts new file mode 100644 index 0000000..92199d4 --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/constants.ts @@ -0,0 +1,7 @@ +export const VIEW_TYPES = { + OVERVIEW: 'OVERVIEW', + JSON: 'JSON', + CONTEXT: 'CONTEXT', +} as const; + +export type 
VIEWS = typeof VIEW_TYPES[keyof typeof VIEW_TYPES]; diff --git a/signoz/frontend/src/components/LogDetail/index.tsx b/signoz/frontend/src/components/LogDetail/index.tsx new file mode 100644 index 0000000..0794ead --- /dev/null +++ b/signoz/frontend/src/components/LogDetail/index.tsx @@ -0,0 +1,211 @@ +/* eslint-disable sonarjs/cognitive-complexity */ +import './LogDetails.styles.scss'; + +import { Color, Spacing } from '@signozhq/design-tokens'; +import { Button, Divider, Drawer, Radio, Tooltip, Typography } from 'antd'; +import { RadioChangeEvent } from 'antd/lib'; +import cx from 'classnames'; +import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator'; +import ContextView from 'container/LogDetailedView/ContextView/ContextView'; +import JSONView from 'container/LogDetailedView/JsonView'; +import Overview from 'container/LogDetailedView/Overview'; +import { aggregateAttributesResourcesToString } from 'container/LogDetailedView/utils'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useNotifications } from 'hooks/useNotifications'; +import { + Braces, + Copy, + Filter, + HardHat, + Table, + TextSelect, + X, +} from 'lucide-react'; +import { useState } from 'react'; +import { useCopyToClipboard } from 'react-use'; +import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; + +import { VIEW_TYPES, VIEWS } from './constants'; +import { LogDetailProps } from './LogDetail.interfaces'; +import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper'; + +function LogDetail({ + log, + onClose, + onAddToQuery, + onClickActionItem, + selectedTab, + isListViewPanel = false, +}: LogDetailProps): JSX.Element { + const [, copyToClipboard] = useCopyToClipboard(); + const [selectedView, setSelectedView] = useState(selectedTab); + + const [isFilterVisibile, setIsFilterVisible] = useState(false); + + const [contextQuery, setContextQuery] = useState(); + const [filters, setFilters] = useState(null); + const [isEdit, setIsEdit] = 
useState(false); + + const isDarkMode = useIsDarkMode(); + + const { notifications } = useNotifications(); + + const LogJsonData = log ? aggregateAttributesResourcesToString(log) : ''; + + const handleModeChange = (e: RadioChangeEvent): void => { + setSelectedView(e.target.value); + setIsEdit(false); + setIsFilterVisible(false); + }; + + const handleFilterVisible = (): void => { + setIsFilterVisible(!isFilterVisibile); + setIsEdit(!isEdit); + }; + + const drawerCloseHandler = ( + e: React.MouseEvent | React.KeyboardEvent, + ): void => { + if (onClose) { + onClose(e); + } + }; + + const handleJSONCopy = (): void => { + copyToClipboard(LogJsonData); + notifications.success({ + message: 'Copied to clipboard', + }); + }; + + if (!log) { + // eslint-disable-next-line react/jsx-no-useless-fragment + return <>; + } + + const logType = log?.attributes_string?.log_level || LogType.INFO; + + return ( + + + Log details + + } + placement="right" + // closable + onClose={drawerCloseHandler} + open={log !== null} + style={{ + overscrollBehavior: 'contain', + background: isDarkMode ? Color.BG_INK_400 : Color.BG_VANILLA_100, + }} + className="log-detail-drawer" + destroyOnClose + closeIcon={} + > +
+ + + {log?.body} + + +
 
+
+ +
+ + +
+ + Overview + + + +
+ + JSON +
+
+ +
+ + Context +
+
+ + + {selectedView === VIEW_TYPES.JSON && ( +
+
+ )} + + {selectedView === VIEW_TYPES.CONTEXT && ( +
+ ); +} + +export default LogsTableView; diff --git a/signoz/frontend/src/components/Logs/TableView/styles.ts b/signoz/frontend/src/components/Logs/TableView/styles.ts new file mode 100644 index 0000000..9213021 --- /dev/null +++ b/signoz/frontend/src/components/Logs/TableView/styles.ts @@ -0,0 +1,23 @@ +import styled from 'styled-components'; + +interface TableBodyContentProps { + linesPerRow: number; + isDarkMode?: boolean; +} + +export const TableBodyContent = styled.div` + margin-bottom: 0; + color: ${(props): string => + props.isDarkMode ? 'var(--bg-vanilla-400, #c0c1c3)' : 'var(--bg-slate-400)'}; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 18px; /* 128.571% */ + letter-spacing: -0.07px; + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + -webkit-line-clamp: ${(props): number => props.linesPerRow}; + line-clamp: ${(props): number => props.linesPerRow}; + -webkit-box-orient: vertical; +`; diff --git a/signoz/frontend/src/components/Logs/TableView/types.ts b/signoz/frontend/src/components/Logs/TableView/types.ts new file mode 100644 index 0000000..36a796a --- /dev/null +++ b/signoz/frontend/src/components/Logs/TableView/types.ts @@ -0,0 +1,35 @@ +import { ColumnsType, ColumnType } from 'antd/es/table'; +import { IField } from 'types/api/logs/fields'; +import { ILog } from 'types/api/logs/log'; + +export type ColumnTypeRender = ReturnType< + NonNullable['render']> +>; + +export type LogsTableViewProps = { + logs: ILog[]; + fields: IField[]; + linesPerRow: number; + onClickExpand?: (log: ILog) => void; +}; + +export type UseTableViewResult = { + columns: ColumnsType>; + dataSource: Record[]; +}; + +export type UseTableViewProps = { + appendTo?: 'center' | 'end'; + onOpenLogsContext?: (log: ILog) => void; + onClickExpand?: (log: ILog) => void; + activeLog?: ILog | null; + activeLogIndex?: number; + activeContextLog?: ILog | null; + isListViewPanel?: boolean; +} & LogsTableViewProps; + +export type 
ActionsColumnProps = { + logId: string; + logs: ILog[]; + onOpenLogsContext?: (log: ILog) => void; +}; diff --git a/signoz/frontend/src/components/Logs/TableView/useTableView.styles.scss b/signoz/frontend/src/components/Logs/TableView/useTableView.styles.scss new file mode 100644 index 0000000..3723ecc --- /dev/null +++ b/signoz/frontend/src/components/Logs/TableView/useTableView.styles.scss @@ -0,0 +1,27 @@ +.text { + color: var(--bg-vanilla-400); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 18px; /* 128.571% */ + letter-spacing: -0.07px; +} + +.table-timestamp { + display: flex; + align-items: center; + + .ant-typography { + margin-bottom: 0; + } + + .log-state-indicator { + padding: 0px; + } +} + +.lightMode { + .text { + color: var(--bg-slate-400); + } +} diff --git a/signoz/frontend/src/components/Logs/TableView/useTableView.tsx b/signoz/frontend/src/components/Logs/TableView/useTableView.tsx new file mode 100644 index 0000000..fd37132 --- /dev/null +++ b/signoz/frontend/src/components/Logs/TableView/useTableView.tsx @@ -0,0 +1,136 @@ +import './useTableView.styles.scss'; + +import Convert from 'ansi-to-html'; +import { Typography } from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import dayjs from 'dayjs'; +import dompurify from 'dompurify'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { FlatLogData } from 'lib/logs/flatLogData'; +import { useMemo } from 'react'; +import { FORBID_DOM_PURIFY_TAGS } from 'utils/app'; + +import LogStateIndicator from '../LogStateIndicator/LogStateIndicator'; +import { getLogIndicatorTypeForTable } from '../LogStateIndicator/utils'; +import { + defaultListViewPanelStyle, + defaultTableStyle, + getDefaultCellStyle, +} from './config'; +import { TableBodyContent } from './styles'; +import { + ColumnTypeRender, + UseTableViewProps, + UseTableViewResult, +} from './types'; + +const convert = new Convert(); + +export const useTableView = (props: UseTableViewProps): 
UseTableViewResult => { + const { + logs, + fields, + linesPerRow, + appendTo = 'center', + activeContextLog, + activeLog, + isListViewPanel, + } = props; + + const isDarkMode = useIsDarkMode(); + + const flattenLogData = useMemo(() => logs.map((log) => FlatLogData(log)), [ + logs, + ]); + + const columns: ColumnsType> = useMemo(() => { + const fieldColumns: ColumnsType> = fields + .filter((e) => e.name !== 'id') + .map(({ name }) => ({ + title: name, + dataIndex: name, + key: name, + render: (field): ColumnTypeRender> => ({ + props: { + style: isListViewPanel + ? defaultListViewPanelStyle + : getDefaultCellStyle(isDarkMode), + }, + children: ( + + {field} + + ), + }), + })); + + if (isListViewPanel) { + return [...fieldColumns]; + } + + return [ + { + title: 'timestamp', + dataIndex: 'timestamp', + key: 'timestamp', + // https://github.com/ant-design/ant-design/discussions/36886 + render: (field, item): ColumnTypeRender> => { + const date = + typeof field === 'string' + ? dayjs(field).format('YYYY-MM-DD HH:mm:ss.SSS') + : dayjs(field / 1e6).format('YYYY-MM-DD HH:mm:ss.SSS'); + return { + children: ( +
+ + + {date} + +
+ ), + }; + }, + }, + ...(appendTo === 'center' ? fieldColumns : []), + { + title: 'body', + dataIndex: 'body', + key: 'body', + render: (field): ColumnTypeRender> => ({ + props: { + style: defaultTableStyle, + }, + children: ( + + ), + }), + }, + ...(appendTo === 'end' ? fieldColumns : []), + ]; + }, [ + fields, + isListViewPanel, + appendTo, + isDarkMode, + linesPerRow, + activeLog?.id, + activeContextLog?.id, + ]); + + return { columns, dataSource: flattenLogData }; +}; diff --git a/signoz/frontend/src/components/Logs/styles.ts b/signoz/frontend/src/components/Logs/styles.ts new file mode 100644 index 0000000..53f3119 --- /dev/null +++ b/signoz/frontend/src/components/Logs/styles.ts @@ -0,0 +1,8 @@ +import { Button } from 'antd'; +import styled from 'styled-components'; + +export const ButtonContainer = styled(Button)` + &&& { + padding-left: 0; + } +`; diff --git a/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss b/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss new file mode 100644 index 0000000..af325a2 --- /dev/null +++ b/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.styles.scss @@ -0,0 +1,396 @@ +.nested-menu-container { + z-index: 2; + position: absolute; + right: -2px; + margin: 6px 0; + width: 160px; + + border-radius: 4px; + + border: 1px solid var(--bg-slate-400, #1d212d); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + + .menu-container { + padding: 12px; + + .title { + font-family: Inter; + font-size: 11px; + font-weight: 600; + line-height: 18px; + letter-spacing: 0.08em; + text-align: left; + color: #52575c; + } + + .menu-items { + display: flex; + gap: 8px; + flex-direction: column; + margin-top: 12px; + } + + .item { + font-family: Inter; + font-size: 13px; + font-weight: 400; + line-height: 17px; + 
letter-spacing: 0.01em; + text-align: left; + + .item-label { + display: flex; + color: var(--bg-vanilla-400, #c0c1c3); + justify-content: space-between; + align-items: center; + } + + cursor: pointer; + } + } + + .horizontal-line { + height: 1px; + background: #1d212d; + } + + .max-lines-per-row { + padding: 12px; + + .title { + color: #52575c; + font-family: Inter; + font-size: 11px; + font-style: normal; + font-weight: 600; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + + margin-bottom: 12px; + + display: flex; + justify-content: space-between; + align-items: center; + + .lucide { + color: var(--bg-vanilla-400, #c0c1c3); + cursor: pointer; + } + } + + .max-lines-per-row-input { + display: flex; + + .ant-input-number-handler-wrap { + display: none; + } + + .ant-input-number { + min-width: 36px; + width: auto; + border-right: none; + border-left: none; + border-top: 1px solid var(--bg-slate-400); + border-bottom: 1px solid var(--bg-slate-400); + text-align: center; + height: 26px; + border-radius: 0; + + &:active, + &:focus { + border: none; + box-shadow: none; + } + } + + .ant-input-number-focused { + box-shadow: none !important; + } + + .ant-input-number-input-wrap { + input { + text-align: center; + font-size: 13px; + + &:active, + &:focus { + border: none; + } + } + + &:active, + &:focus { + border: none; + } + } + + .periscope-btn { + box-shadow: none; + padding: 6px 12px; + height: 26px; + + border-radius: 0px 1px 1px 0px; + background: var(--bg-ink-300, #16181d); + } + } + } + + .selected-item-content-container { + .add-new-column-header { + padding: 8px; + } + + .title { + color: #52575c; + font-family: Inter; + font-size: 11px; + font-style: normal; + font-weight: 600; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + + margin-bottom: 12px; + + display: flex; + justify-content: space-between; + align-items: center; + + .lucide { + color: var(--bg-vanilla-400, 
#c0c1c3); + cursor: pointer; + } + } + + .horizontal-line { + height: 1px; + background: #1d212d; + } + + .loading-container { + margin: 12px 0; + } + + .item-content { + padding: 12px; + + .column-format, + .column-format-new-options { + display: flex; + gap: 12px; + flex-direction: column; + margin-top: 12px; + + .column-name { + color: var(--bg-vanilla-400, #c0c1c3); + font-family: Inter; + font-size: 13px; + font-style: normal; + font-weight: 400; + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + + display: flex; + justify-content: space-between; + align-items: center; + gap: 8px; + + cursor: pointer; + + .name { + flex: 1; + overflow: hidden; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + + cursor: pointer; + } + + .delete-btn { + display: none; + flex: 0 0 16px; + cursor: pointer; + } + + &:hover { + .delete-btn { + display: block; + } + } + } + + overflow-x: hidden; + + &::-webkit-scrollbar { + height: 1rem; + width: 0.2rem; + } + } + + .column-format { + max-height: 150px; + overflow: auto; + overflow-x: hidden; + } + + .column-format-new-options { + max-height: 150px; + overflow-y: auto; + overflow-x: hidden; + } + + .column-divider { + margin: 12px 0; + border-top: 2px solid var(--bg-slate-400); + } + } + } + + &.active { + .nested-menu-container { + backdrop-filter: blur(18px); + + .item { + .item-label { + color: var(--bg-vanilla-400); + } + } + } + + .selected-item-content-container { + width: 110%; + margin-left: -5%; + + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + + .column-format { + margin-top: 0px; + } + } + } +} + +.lightMode { + .nested-menu-container { + border: 1px solid var(--bg-vanilla-300); + background: linear-gradient( + 139deg, + rgba(255, 255, 255, 0.8) 0%, + rgba(255, 255, 255, 0.9) 98.68% + 
); + + box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2); + + .horizontal-line { + background: var(--bg-vanilla-300); + } + + .item-content { + .column-divider { + border-top: 2px solid var(--bg-vanilla-300); + } + } + + .max-lines-per-row { + .title { + color: var(--bg-ink-200); + + .lucide { + color: var(--bg-ink-300); + } + } + + .max-lines-per-row-input { + border: 1px solid var(--bg-vanilla-300); + + .periscope-btn { + background: var(--bg-vanilla-300); + } + } + } + + .menu-container { + .title { + color: var(--bg-ink-200); + } + + .item { + .item-label { + color: var(--bg-ink-400); + } + } + } + + .selected-item-content-container { + .title { + color: var(--bg-ink-200); + + .lucide { + color: var(--bg-ink-300); + } + } + + .horizontal-line { + background: var(--bg-vanilla-300); + } + + .item-content { + .max-lines-per-row-input { + border: 1px solid var(--bg-vanilla-300); + + .periscope-btn { + background: var(--bg-vanilla-300); + } + } + + .column-format, + .column-format-new-options { + .column-name { + color: var(--bg-ink-300); + } + } + } + } + + &.active { + .nested-menu-container { + backdrop-filter: blur(18px); + + .item { + .item-label { + color: var(--bg-ink-300); + } + } + } + + .selected-item-content-container { + border: 1px solid var(--bg-vanilla-300); + background: linear-gradient( + 139deg, + rgba(255, 255, 255, 0.8) 0%, + rgba(255, 255, 255, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2); + } + } + } +} diff --git a/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx b/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx new file mode 100644 index 0000000..3a42e9a --- /dev/null +++ b/signoz/frontend/src/components/LogsFormatOptionsMenu/LogsFormatOptionsMenu.tsx @@ -0,0 +1,242 @@ +/* eslint-disable react-hooks/exhaustive-deps */ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ +import 
'./LogsFormatOptionsMenu.styles.scss'; + +import { Divider, Input, InputNumber, Tooltip } from 'antd'; +import cx from 'classnames'; +import { LogViewMode } from 'container/LogsTable'; +import { OptionsMenuConfig } from 'container/OptionsMenu/types'; +import useDebouncedFn from 'hooks/useDebouncedFunction'; +import { Check, Minus, Plus, X } from 'lucide-react'; +import { useCallback, useEffect, useState } from 'react'; + +interface LogsFormatOptionsMenuProps { + title: string; + items: any; + selectedOptionFormat: any; + config: OptionsMenuConfig; +} + +export default function LogsFormatOptionsMenu({ + title, + items, + selectedOptionFormat, + config, +}: LogsFormatOptionsMenuProps): JSX.Element { + const { maxLines, format, addColumn } = config; + const [selectedItem, setSelectedItem] = useState(selectedOptionFormat); + const maxLinesNumber = (maxLines?.value as number) || 1; + const [maxLinesPerRow, setMaxLinesPerRow] = useState(maxLinesNumber); + + const [addNewColumn, setAddNewColumn] = useState(false); + + const onChange = useCallback( + (key: LogViewMode) => { + if (!format) return; + + format.onChange(key); + }, + [format], + ); + + const handleMenuItemClick = (key: LogViewMode): void => { + setSelectedItem(key); + onChange(key); + setAddNewColumn(false); + }; + + const incrementMaxLinesPerRow = (): void => { + if (maxLinesPerRow < 10) { + setMaxLinesPerRow(maxLinesPerRow + 1); + } + }; + + const decrementMaxLinesPerRow = (): void => { + if (maxLinesPerRow > 1) { + setMaxLinesPerRow(maxLinesPerRow - 1); + } + }; + + const handleSearchValueChange = useDebouncedFn((event): void => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const value = event?.target?.value || ''; + + if (addColumn && addColumn?.onSearch) { + addColumn?.onSearch(value); + } + }, 300); + + const handleToggleAddNewColumn = (): void => { + setAddNewColumn(!addNewColumn); + }; + + const handleLinesPerRowChange = (maxLinesPerRow: number | null): void => { + 
if ( + maxLinesPerRow && + Number.isInteger(maxLinesNumber) && + maxLinesPerRow > 1 + ) { + setMaxLinesPerRow(maxLinesPerRow); + } + }; + + useEffect(() => { + if (maxLinesPerRow && config && config.maxLines?.onChange) { + config.maxLines.onChange(maxLinesPerRow); + } + }, [maxLinesPerRow]); + + return ( +
{ + // this is to restrict click events to propogate to parent + event.stopPropagation(); + }} + > +
+
{title}
+ +
+ {items.map( + (item: any): JSX.Element => ( +
handleMenuItemClick(item.key)} + > +
+ {item.label} + + {selectedItem === item.key && } +
+
+ ), + )} +
+
+ + {selectedItem && ( + <> + <> +
+
+
max lines per row
+
+ + + +
+
+ + +
+ {!addNewColumn &&
} + + {addNewColumn && ( +
+
+ {' '} + columns + {' '} +
+ + +
+ )} + +
+ {!addNewColumn && ( +
+ columns + {' '} +
+ )} + +
+ {addColumn?.value?.map(({ key, id }) => ( +
+
+ + {key} + +
+ addColumn.onRemove(id as string)} + /> +
+ ))} +
+ + {addColumn?.isFetching && ( +
Loading ...
+ )} + + {addNewColumn && + addColumn && + addColumn.value.length > 0 && + addColumn.options && + addColumn?.options?.length > 0 && ( + + )} + + {addNewColumn && ( +
+ {addColumn?.options?.map(({ label, value }) => ( +
{ + eve.stopPropagation(); + + if (addColumn && addColumn?.onSelect) { + addColumn?.onSelect(value, { label, disabled: false }); + } + }} + > +
+ + {label} + +
+
+ ))} +
+ )} +
+
+ + )} +
+ ); +} diff --git a/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.scss b/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.scss new file mode 100644 index 0000000..2d2166f --- /dev/null +++ b/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.scss @@ -0,0 +1,36 @@ +.code-snippet-container { + position: relative; + // background-color: rgb(43, 43, 43); + background-color: #111a2c; + border-color: #111a2c; +} + +.code-copy-btn { + position: absolute; + top: 8px; + right: 8px; + display: flex; + justify-content: flex-end; + align-items: center; + + button { + cursor: pointer; + + background-color: rgba($color: #1d1d1d, $alpha: 0.7); + color: white; + border: none; + padding: 8px; + border-radius: 3px; + transition: all 0.1s; + + &:hover { + background-color: rgba($color: #1d1d1d, $alpha: 1); + } + } + + &.copied { + button { + background-color: rgba($color: #52c41a, $alpha: 1); + } + } +} diff --git a/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.tsx b/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.tsx new file mode 100644 index 0000000..b098c42 --- /dev/null +++ b/signoz/frontend/src/components/MarkdownRenderer/CodeCopyBtn/CodeCopyBtn.tsx @@ -0,0 +1,46 @@ +/* eslint-disable prefer-destructuring */ +import './CodeCopyBtn.scss'; + +import { CheckOutlined, CopyOutlined } from '@ant-design/icons'; +import cx from 'classnames'; +import React, { useState } from 'react'; + +function CodeCopyBtn({ + children, + onCopyClick, +}: { + children: React.ReactNode; + onCopyClick?: (additionalInfo?: Record) => void; +}): JSX.Element { + const [isSnippetCopied, setIsSnippetCopied] = useState(false); + + const handleClick = (): void => { + let copiedText = ''; + if (children && Array.isArray(children)) { + setIsSnippetCopied(true); + navigator.clipboard.writeText(children[0].props.children[0]).finally(() => { + copiedText = (children[0].props.children[0] as 
string).slice(0, 200); // slicing is done due to the limitation in accepted char length in attributes + setTimeout(() => { + setIsSnippetCopied(false); + }, 1000); + }); + copiedText = (children[0].props.children[0] as string).slice(0, 200); + } + + onCopyClick?.({ copiedText }); + }; + + return ( +
+ +
+ ); +} + +CodeCopyBtn.defaultProps = { + onCopyClick: (): void => {}, +}; + +export default CodeCopyBtn; diff --git a/signoz/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx b/signoz/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx new file mode 100644 index 0000000..3b20454 --- /dev/null +++ b/signoz/frontend/src/components/MarkdownRenderer/MarkdownRenderer.tsx @@ -0,0 +1,143 @@ +/* eslint-disable no-restricted-syntax */ +/* eslint-disable react/jsx-props-no-spreading */ +/* eslint-disable @typescript-eslint/explicit-function-return-type */ + +import logEvent from 'api/common/logEvent'; +import { isEmpty } from 'lodash-es'; +import ReactMarkdown from 'react-markdown'; +import { CodeProps } from 'react-markdown/lib/ast-to-react'; +import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; +import { a11yDark } from 'react-syntax-highlighter/dist/cjs/styles/prism'; +import rehypeRaw from 'rehype-raw'; + +import CodeCopyBtn from './CodeCopyBtn/CodeCopyBtn'; + +interface LinkProps { + href: string; + children: React.ReactElement; +} + +function Pre({ + children, + elementDetails, + trackCopyAction, +}: { + children: React.ReactNode; + trackCopyAction: boolean; + elementDetails: Record; +}): JSX.Element { + const { trackingTitle = '', ...rest } = elementDetails; + + const handleClick = (additionalInfo?: Record): void => { + const trackingData = { ...rest, copiedContent: additionalInfo }; + + if (trackCopyAction && !isEmpty(trackingTitle)) { + logEvent(trackingTitle as string, trackingData); + } + }; + + return ( +
+			{children}
+			{children}
+		
+ ); +} + +function Code({ + node, + inline, + className = 'blog-code', + children, + ...props +}: CodeProps): JSX.Element { + const match = /language-(\w+)/.exec(className || ''); + return !inline && match ? ( + + {String(children).replace(/\n$/, '')} + + ) : ( + + {children} + + ); +} + +function Link({ href, children }: LinkProps): JSX.Element { + return ( + + {children} + + ); +} + +const interpolateMarkdown = ( + markdownContent: any, + variables: { [s: string]: unknown } | ArrayLike, +) => { + let interpolatedContent = markdownContent; + + const variableEntries = Object.entries(variables); + + // Loop through variables and replace placeholders with values + for (const [key, value] of variableEntries) { + const placeholder = `{{${key}}}`; + const regex = new RegExp(placeholder, 'g'); + interpolatedContent = interpolatedContent.replace(regex, value); + } + + return interpolatedContent; +}; + +function CustomTag({ color }: { color: string }): JSX.Element { + return

This is custom element

; +} + +function MarkdownRenderer({ + markdownContent, + variables, + trackCopyAction, + elementDetails, +}: { + markdownContent: any; + variables: any; + trackCopyAction?: boolean; + elementDetails?: Record; +}): JSX.Element { + const interpolatedMarkdown = interpolateMarkdown(markdownContent, variables); + + return ( + + Pre({ + children, + elementDetails: elementDetails ?? {}, + trackCopyAction: !!trackCopyAction, + }), + code: Code, + customtag: CustomTag, + }} + > + {interpolatedMarkdown} + + ); +} + +MarkdownRenderer.defaultProps = { + elementDetails: {}, + trackCopyAction: false, +}; + +export { Code, Link, MarkdownRenderer, Pre }; diff --git a/signoz/frontend/src/components/MessageTip/MessageTip.test.tsx b/signoz/frontend/src/components/MessageTip/MessageTip.test.tsx new file mode 100644 index 0000000..1c050c0 --- /dev/null +++ b/signoz/frontend/src/components/MessageTip/MessageTip.test.tsx @@ -0,0 +1,47 @@ +import { render, screen } from '@testing-library/react'; + +import MessageTip from './index'; + +describe('MessageTip', () => { + it('should not render when show prop is false', () => { + render( + Close} + />, + ); + + const messageTip = screen.queryByRole('alert'); + + expect(messageTip).toBeNull(); + }); + + it('should render with the provided message and action', () => { + const message = 'Test Message'; + const action = ; + + render(); + + const messageTip = screen.getByRole('alert'); + const messageText = screen.getByText(message); + const actionButton = screen.getByRole('button', { name: 'Close' }); + + expect(messageTip).toBeInTheDocument(); + expect(messageText).toBeInTheDocument(); + expect(actionButton).toBeInTheDocument(); + }); + + // taken from antd docs + // https://github.com/ant-design/ant-design/blob/master/components/alert/__tests__/index.test.tsx + it('custom action', () => { + const { container } = render( + Close} + />, + ); + expect(container.firstChild).toMatchSnapshot(); + }); +}); diff --git 
a/signoz/frontend/src/components/MessageTip/__snapshots__/MessageTip.test.tsx.snap b/signoz/frontend/src/components/MessageTip/__snapshots__/MessageTip.test.tsx.snap new file mode 100644 index 0000000..8d671bb --- /dev/null +++ b/signoz/frontend/src/components/MessageTip/__snapshots__/MessageTip.test.tsx.snap @@ -0,0 +1,54 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`MessageTip custom action 1`] = ` +.c0 { + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; +} + + +`; diff --git a/signoz/frontend/src/components/MessageTip/index.tsx b/signoz/frontend/src/components/MessageTip/index.tsx new file mode 100644 index 0000000..23c7403 --- /dev/null +++ b/signoz/frontend/src/components/MessageTip/index.tsx @@ -0,0 +1,27 @@ +import { ReactNode } from 'react'; + +import { StyledAlert } from './styles'; + +interface MessageTipProps { + show?: boolean; + message: ReactNode | string; + action: ReactNode | undefined; +} + +function MessageTip({ + show, + message, + action, +}: MessageTipProps): JSX.Element | null { + if (!show) return null; + + return ( + + ); +} + +MessageTip.defaultProps = { + show: false, +}; + +export default MessageTip; diff --git a/signoz/frontend/src/components/MessageTip/styles.ts b/signoz/frontend/src/components/MessageTip/styles.ts new file mode 100644 index 0000000..2bf5740 --- /dev/null +++ b/signoz/frontend/src/components/MessageTip/styles.ts @@ -0,0 +1,6 @@ +import { Alert } from 'antd'; +import styled from 'styled-components'; + +export const StyledAlert = styled(Alert)` + align-items: center; +`; diff --git a/signoz/frontend/src/components/Modal.tsx b/signoz/frontend/src/components/Modal.tsx new file mode 100644 index 0000000..a8f8bb6 --- /dev/null +++ b/signoz/frontend/src/components/Modal.tsx @@ -0,0 +1,36 @@ +import { Modal, ModalProps as Props } from 'antd'; +import { ReactElement } from 'react'; + +function CustomModal({ + title, + children, + isModalVisible, + footer, + 
closable = true, +}: ModalProps): JSX.Element { + return ( + + {children} + + ); +} + +interface ModalProps { + isModalVisible: boolean; + closable?: boolean; + footer?: Props['footer']; + title: string; + children: ReactElement; +} + +CustomModal.defaultProps = { + closable: undefined, + footer: undefined, +}; + +export default CustomModal; diff --git a/signoz/frontend/src/components/NotFound/NotFound.test.tsx b/signoz/frontend/src/components/NotFound/NotFound.test.tsx new file mode 100644 index 0000000..f72596e --- /dev/null +++ b/signoz/frontend/src/components/NotFound/NotFound.test.tsx @@ -0,0 +1,19 @@ +import { render } from '@testing-library/react'; +import { Provider } from 'react-redux'; +import { MemoryRouter } from 'react-router-dom'; +import store from 'store'; + +import NotFound from './index'; + +describe('Not Found page test', () => { + it('should render Not Found page without errors', () => { + const { asFragment } = render( + + + + + , + ); + expect(asFragment()).toMatchSnapshot(); + }); +}); diff --git a/signoz/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap b/signoz/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap new file mode 100644 index 0000000..5415d86 --- /dev/null +++ b/signoz/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap @@ -0,0 +1,130 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Not Found page test should render Not Found page without errors 1`] = ` + + .c3 { + border: 2px solid #2f80ed; + box-sizing: border-box; + border-radius: 10px; + width: 400px; + background: inherit; + font-style: normal; + font-weight: normal; + font-size: 24px; + line-height: 20px; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; + -webkit-box-pack: center; + -webkit-justify-content: center; + -ms-flex-pack: center; + justify-content: 
center; + padding-top: 14px; + padding-bottom: 14px; + color: #2f80ed; +} + +.c0 { + min-height: 80vh; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; + -webkit-box-pack: center; + -webkit-justify-content: center; + -ms-flex-pack: center; + justify-content: center; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; +} + +.c2 { + font-style: normal; + font-weight: 300; + font-size: 18px; + line-height: 20px; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; + text-align: center; + color: #828282; + text-align: center; + margin: 0; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-box-pack: center; + -webkit-justify-content: center; + -ms-flex-pack: center; + justify-content: center; + -webkit-align-items: center; + -webkit-box-align: center; + -ms-flex-align: center; + align-items: center; +} + +.c1 { + min-height: 50px; + display: -webkit-box; + display: -webkit-flex; + display: -ms-flexbox; + display: flex; + -webkit-box-pack: justify; + -webkit-justify-content: space-between; + -ms-flex-pack: justify; + justify-content: space-between; + -webkit-flex-direction: column; + -ms-flex-direction: column; + flex-direction: column; + margin-bottom: 30px; + margin-top: 20px; +} + +
+ not-found +
+

+ Ah, seems like we reached a dead end! +

+

+ Page Not Found +

+
+ + Return To Services Page + +
+
+`; diff --git a/signoz/frontend/src/components/NotFound/constant.ts b/signoz/frontend/src/components/NotFound/constant.ts new file mode 100644 index 0000000..b23534f --- /dev/null +++ b/signoz/frontend/src/components/NotFound/constant.ts @@ -0,0 +1 @@ +export const defaultText = 'Ah, seems like we reached a dead end!'; diff --git a/signoz/frontend/src/components/NotFound/index.tsx b/signoz/frontend/src/components/NotFound/index.tsx new file mode 100644 index 0000000..5af3c46 --- /dev/null +++ b/signoz/frontend/src/components/NotFound/index.tsx @@ -0,0 +1,53 @@ +import getLocalStorageKey from 'api/browser/localstorage/get'; +import NotFoundImage from 'assets/NotFound'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import ROUTES from 'constants/routes'; +import { useCallback } from 'react'; +import { useDispatch } from 'react-redux'; +import { Dispatch } from 'redux'; +import AppActions from 'types/actions'; +import { LOGGED_IN } from 'types/actions/app'; + +import { defaultText } from './constant'; +import { Button, Container, Text, TextContainer } from './styles'; + +function NotFound({ text = defaultText }: Props): JSX.Element { + const dispatch = useDispatch>(); + const isLoggedIn = getLocalStorageKey(LOCALSTORAGE.IS_LOGGED_IN); + + const onClickHandler = useCallback(() => { + if (isLoggedIn) { + dispatch({ + type: LOGGED_IN, + payload: { + isLoggedIn: true, + }, + }); + } + }, [dispatch, isLoggedIn]); + + return ( + + + + + {text} + Page Not Found + + + + + ); +} + +interface Props { + text?: string; +} + +NotFound.defaultProps = { + text: defaultText, +}; + +export default NotFound; diff --git a/signoz/frontend/src/components/NotFound/styles.ts b/signoz/frontend/src/components/NotFound/styles.ts new file mode 100644 index 0000000..812fba7 --- /dev/null +++ b/signoz/frontend/src/components/NotFound/styles.ts @@ -0,0 +1,60 @@ +import { Link } from 'react-router-dom'; +import styled from 'styled-components'; + +export const Button = styled(Link)` + 
border: 2px solid #2f80ed; + box-sizing: border-box; + border-radius: 10px; + width: 400px; + + background: inherit; + + font-style: normal; + font-weight: normal; + font-size: 24px; + line-height: 20px; + + display: flex; + align-items: center; + justify-content: center; + padding-top: 14px; + padding-bottom: 14px; + + color: #2f80ed; +`; + +export const Container = styled.div` + min-height: 80vh; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; +`; + +export const Text = styled.p` + font-style: normal; + font-weight: 300; + font-size: 18px; + line-height: 20px; + + display: flex; + align-items: center; + text-align: center; + + color: #828282; + text-align: center; + margin: 0; + + display: flex; + justify-content: center; + align-items: center; +`; + +export const TextContainer = styled.div` + min-height: 50px; + display: flex; + justify-content: space-between; + flex-direction: column; + margin-bottom: 30px; + margin-top: 20px; +`; diff --git a/signoz/frontend/src/components/OverlayScrollbar/OverlayScrollbar.tsx b/signoz/frontend/src/components/OverlayScrollbar/OverlayScrollbar.tsx new file mode 100644 index 0000000..73c95ce --- /dev/null +++ b/signoz/frontend/src/components/OverlayScrollbar/OverlayScrollbar.tsx @@ -0,0 +1,54 @@ +import TypicalOverlayScrollbar from 'components/TypicalOverlayScrollbar/TypicalOverlayScrollbar'; +import VirtuosoOverlayScrollbar from 'components/VirtuosoOverlayScrollbar/VirtuosoOverlayScrollbar'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { PartialOptions } from 'overlayscrollbars'; +import { CSSProperties, ReactElement, useMemo } from 'react'; + +type Props = { + children: ReactElement; + isVirtuoso?: boolean; + style?: CSSProperties; + options?: PartialOptions; +}; + +function OverlayScrollbar({ + children, + isVirtuoso, + style, + options: customOptions, +}: Props): any { + const isDarkMode = useIsDarkMode(); + const options = useMemo( + () => + ({ + scrollbars: { + 
autoHide: 'scroll', + theme: isDarkMode ? 'os-theme-light' : 'os-theme-dark', + }, + ...(customOptions || {}), + } as PartialOptions), + [customOptions, isDarkMode], + ); + + if (isVirtuoso) { + return ( + + {children} + + ); + } + + return ( + + {children} + + ); +} + +OverlayScrollbar.defaultProps = { + isVirtuoso: false, + style: {}, + options: {}, +}; + +export default OverlayScrollbar; diff --git a/signoz/frontend/src/components/ReleaseNote/ReleaseNoteProps.ts b/signoz/frontend/src/components/ReleaseNote/ReleaseNoteProps.ts new file mode 100644 index 0000000..f240759 --- /dev/null +++ b/signoz/frontend/src/components/ReleaseNote/ReleaseNoteProps.ts @@ -0,0 +1,4 @@ +export default interface ReleaseNoteProps { + path?: string; + release?: string; +} diff --git a/signoz/frontend/src/components/ReleaseNote/Releases/ReleaseNote0120.tsx b/signoz/frontend/src/components/ReleaseNote/Releases/ReleaseNote0120.tsx new file mode 100644 index 0000000..249147f --- /dev/null +++ b/signoz/frontend/src/components/ReleaseNote/Releases/ReleaseNote0120.tsx @@ -0,0 +1,73 @@ +import { Button, Space } from 'antd'; +import setFlags from 'api/user/setFlags'; +import MessageTip from 'components/MessageTip'; +import { useCallback } from 'react'; +import { useDispatch, useSelector } from 'react-redux'; +import { Dispatch } from 'redux'; +import { AppState } from 'store/reducers'; +import AppActions from 'types/actions'; +import { UPDATE_USER_FLAG } from 'types/actions/app'; +import { UserFlags } from 'types/api/user/setFlags'; +import AppReducer from 'types/reducer/app'; + +import ReleaseNoteProps from '../ReleaseNoteProps'; + +export default function ReleaseNote0120({ + release, +}: ReleaseNoteProps): JSX.Element | null { + const { user } = useSelector((state) => state.app); + + const dispatch = useDispatch>(); + + const handleDontShow = useCallback(async (): Promise => { + const flags: UserFlags = { ReleaseNote0120Hide: 'Y' }; + + try { + dispatch({ + type: UPDATE_USER_FLAG, + payload: 
{ + flags, + }, + }); + if (!user) { + // no user is set, so escape the routine + return; + } + + const response = await setFlags({ userId: user?.userId, flags }); + + if (response.statusCode !== 200) { + console.log('failed to complete do not show status', response.error); + } + } catch (e) { + // here we do not nothing as the cost of error is minor, + // the user can switch the do no show option again in the further. + console.log('unexpected error: failed to complete do not show status', e); + } + }, [dispatch, user]); + + return ( + + You are using {release} of SigNoz. We have introduced distributed setup in + v0.12.0 release. If you use or plan to use clickhouse queries in dashboard + or alerts, you might want to read about querying the new distributed tables{' '} + + here + +
+ } + action={ + + + + } + /> + ); +} diff --git a/signoz/frontend/src/components/ReleaseNote/index.tsx b/signoz/frontend/src/components/ReleaseNote/index.tsx new file mode 100644 index 0000000..bf788e6 --- /dev/null +++ b/signoz/frontend/src/components/ReleaseNote/index.tsx @@ -0,0 +1,66 @@ +import ReleaseNoteProps from 'components/ReleaseNote/ReleaseNoteProps'; +import ReleaseNote0120 from 'components/ReleaseNote/Releases/ReleaseNote0120'; +import ROUTES from 'constants/routes'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { UserFlags } from 'types/api/user/setFlags'; +import AppReducer from 'types/reducer/app'; + +interface ComponentMapType { + match: ( + path: string | undefined, + version: string, + userFlags: UserFlags | null, + ) => boolean; + component: ({ path, release }: ReleaseNoteProps) => JSX.Element | null; +} + +const allComponentMap: ComponentMapType[] = [ + { + match: ( + path: string | undefined, + version: string, + userFlags: UserFlags | null, + ): boolean => { + if (!path) { + return false; + } + const allowedPaths: string[] = [ + ROUTES.LIST_ALL_ALERT, + ROUTES.APPLICATION, + ROUTES.ALL_DASHBOARD, + ]; + + return ( + userFlags?.ReleaseNote0120Hide !== 'Y' && + allowedPaths.includes(path) && + version.startsWith('v0.12') + ); + }, + component: ReleaseNote0120, + }, +]; + +// ReleaseNote prints release specific warnings and notes that +// user needs to be aware of before using the upgraded version. 
+function ReleaseNote({ path }: ReleaseNoteProps): JSX.Element | null { + const { userFlags, currentVersion } = useSelector( + (state) => state.app, + ); + + const c = allComponentMap.find((item) => + item.match(path, currentVersion, userFlags), + ); + + if (!c) { + return null; + } + + return ; +} + +ReleaseNote.defaultProps = { + path: '', +}; + +export default ReleaseNote; diff --git a/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss b/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss new file mode 100644 index 0000000..31026f4 --- /dev/null +++ b/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.syles.scss @@ -0,0 +1,31 @@ +.DynamicColumnTable { + display: flex; + flex-direction: column; + width: 100%; + + .dynamicColumnTable-button { + align-self: flex-end; + margin: 10px 0; + + &.filter-btn { + display: flex; + align-items: center; + justify-content: center; + } + } +} + +.dynamicColumnsTable-items { + display: flex; + width: 10.625rem; + justify-content: space-between; + align-items: center; +} + +@media (max-width: 768px) { + .dynamicColumnsTable-items { + flex-direction: column; + width: auto; + text-align: center; + } +} diff --git a/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.tsx b/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.tsx new file mode 100644 index 0000000..5a3dfd3 --- /dev/null +++ b/signoz/frontend/src/components/ResizeTable/DynamicColumnTable.tsx @@ -0,0 +1,128 @@ +/* eslint-disable react/jsx-props-no-spreading */ +import './DynamicColumnTable.syles.scss'; + +import { Button, Dropdown, Flex, MenuProps, Switch } from 'antd'; +import { ColumnGroupType, ColumnType } from 'antd/es/table'; +import { ColumnsType } from 'antd/lib/table'; +import logEvent from 'api/common/logEvent'; +import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn'; +import { SlidersHorizontal } from 'lucide-react'; +import { memo, useEffect, useState } from 'react'; 
+import { popupContainer } from 'utils/selectPopupContainer'; + +import ResizeTable from './ResizeTable'; +import { DynamicColumnTableProps } from './types'; +import { + getNewColumnData, + getVisibleColumns, + setVisibleColumns, +} from './utils'; + +function DynamicColumnTable({ + tablesource, + columns, + dynamicColumns, + onDragColumn, + facingIssueBtn, + shouldSendAlertsLogEvent, + ...restProps +}: DynamicColumnTableProps): JSX.Element { + const [columnsData, setColumnsData] = useState( + columns, + ); + + useEffect(() => { + setColumnsData(columns); + const visibleColumns = getVisibleColumns({ + tablesource, + columnsData: columns, + dynamicColumns, + }); + setColumnsData((prevColumns) => + prevColumns + ? [ + ...prevColumns.slice(0, prevColumns.length - 1), + ...visibleColumns, + prevColumns[prevColumns.length - 1], + ] + : undefined, + ); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [columns, dynamicColumns]); + + const onToggleHandler = ( + index: number, + column: ColumnGroupType | ColumnType, + ) => (checked: boolean, event: React.MouseEvent): void => { + event.stopPropagation(); + + if (shouldSendAlertsLogEvent) { + logEvent('Alert: Column toggled', { + column: column?.title, + action: checked ? 'Enable' : 'Disable', + }); + } + setVisibleColumns({ + tablesource, + dynamicColumns, + index, + checked, + }); + setColumnsData((prevColumns) => + getNewColumnData({ + checked, + index, + prevColumns, + dynamicColumns, + }), + ); + }; + + const items: MenuProps['items'] = + dynamicColumns?.map((column, index) => ({ + label: ( +
+
{column.title?.toString()}
+ c.key === column.key) !== -1} + onChange={onToggleHandler(index, column)} + /> +
+ ), + key: index, + type: 'checkbox', + })) || []; + + return ( +
+ + {facingIssueBtn && } + {dynamicColumns && ( + +
+ ); +} + +DynamicColumnTable.defaultProps = { + onDragColumn: undefined, +}; + +export default memo(DynamicColumnTable); diff --git a/signoz/frontend/src/components/ResizeTable/ResizableHeader.tsx b/signoz/frontend/src/components/ResizeTable/ResizableHeader.tsx new file mode 100644 index 0000000..8611a45 --- /dev/null +++ b/signoz/frontend/src/components/ResizeTable/ResizableHeader.tsx @@ -0,0 +1,44 @@ +import { SyntheticEvent, useMemo } from 'react'; +import { Resizable, ResizeCallbackData } from 'react-resizable'; + +import { enableUserSelectHack } from './config'; +import { SpanStyle } from './styles'; + +function ResizableHeader(props: ResizableHeaderProps): JSX.Element { + const { onResize, width, ...restProps } = props; + + const handle = useMemo( + () => ( + e.stopPropagation()} + /> + ), + [], + ); + + if (!width) { + // eslint-disable-next-line react/jsx-props-no-spreading + return
+ + + )} + + + + + + ); +} + +export default ExplorerControlPanel; diff --git a/signoz/frontend/src/container/ExplorerControlPanel/styles.ts b/signoz/frontend/src/container/ExplorerControlPanel/styles.ts new file mode 100644 index 0000000..0c9a799 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerControlPanel/styles.ts @@ -0,0 +1,5 @@ +import styled from 'styled-components'; + +export const ContainerStyled = styled.div` + margin-bottom: 0.3rem; +`; diff --git a/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx new file mode 100644 index 0000000..a2e0eff --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx @@ -0,0 +1,39 @@ +import { useEffect, useState } from 'react'; + +import ExplorerOptions, { ExplorerOptionsProps } from './ExplorerOptions'; +import { getExplorerToolBarVisibility } from './utils'; + +type ExplorerOptionsWrapperProps = Omit< + ExplorerOptionsProps, + 'isExplorerOptionDrop' +>; + +function ExplorerOptionWrapper({ + disabled, + query, + isLoading, + onExport, + sourcepage, +}: ExplorerOptionsWrapperProps): JSX.Element { + const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false); + + useEffect(() => { + const toolbarVisibility = getExplorerToolBarVisibility(sourcepage); + setIsExplorerOptionHidden(!toolbarVisibility); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + return ( + + ); +} + +export default ExplorerOptionWrapper; diff --git a/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss new file mode 100644 index 0000000..54e87fa --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss @@ -0,0 +1,331 @@ +.explorer-options-container { + position: fixed; + bottom: 24px; + left: calc(50% + 240px); + transform: translate(calc(-50% - 
120px), 0); + transition: left 0.2s linear; + + display: flex; + gap: 16px; + background-color: transparent; +} + +.hide-update { + left: calc(50% - 72px) !important; +} + +.explorer-update { + display: inline-flex; + align-items: center; + gap: 12px; + padding: 10px 12px; + border-radius: 50px; + border: 1px solid var(--bg-slate-400); + background: rgba(22, 24, 29, 0.6); + backdrop-filter: blur(20px); + + .action-icon { + display: flex; + justify-content: center; + align-items: center; + padding: 8px; + border-radius: 50px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-slate-500); + cursor: pointer; + } + + .hidden { + display: none; + } + + .ant-divider { + margin: 0; + height: 28px; + border: 1px solid var(--bg-slate-400); + } +} + +.explorer-options { + padding: 10px 12px; + border: 1px solid var(--bg-slate-400); + border-radius: 50px; + background: rgba(22, 24, 29, 0.6); + backdrop-filter: blur(20px); + + cursor: default; + display: flex; + gap: 16px; + z-index: 1; + .ant-select-selector { + padding: 0 !important; + } + + hr { + border-color: #1d212d; + } + + .view-options, + .actions { + .info-icon { + padding: 8px; + } + + .hidden { + display: none; + } + + display: flex; + justify-content: center; + align-items: center; + gap: 8px; + + button { + display: flex; + justify-content: center; + align-items: center; + border: none; + + border: 1px solid #1d2023; + + box-shadow: none !important; + + &.ant-btn-round { + padding: 8px 12px 8px 10px; + font-weight: 500; + } + + &.ant-btn-round:disabled { + background-color: rgba(209, 209, 209, 0.074); + color: #5f5f5f; + } + } + + .ant-select-focused { + border-color: transparent !important; + + .ant-select-selector { + border-color: transparent !important; + box-shadow: none !important; + } + } + + .ant-select-selector { + border: transparent !important; + background-color: transparent !important; + + .ant-select-selection-placeholder { + margin-left: 12px; + } + } + } + .hidden { + display: none; + 
} +} + +.app-content { + &.collapsed { + .explorer-options-container { + left: calc(50% + 72px); + } + } +} + +.render-options { + display: flex; + align-items: center; + gap: 8px; + padding: 0 2px; + color: var(--bg-vanilla-400); + font-family: Inter; + font-size: 12px; + font-style: normal; + font-weight: 400; + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + + .dot { + margin-left: 6px; + min-height: 6px; + min-width: 6px; + border-radius: 50%; + backdrop-filter: blur(20px); + } +} + +.save-view-modal { + width: 384px !important; + .ant-modal-content { + padding: 0; + border-radius: 4px; + border: 1px solid var(--bg-slate-500); + background: var(--bg-ink-400); + box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2); + + .ant-modal-header { + padding: 16px; + background: var(--bg-ink-400); + border-bottom: 1px solid var(--bg-slate-500); + } + + .ant-modal-body { + padding: 12px 16px 0px 16px; + + .ant-typography { + color: var(--bg-vanilla-100); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 500; + line-height: 20px; /* 142.857% */ + } + + .save-view-input { + margin-top: 8px; + display: flex; + gap: 8px; + } + + .ant-color-picker-trigger { + padding: 6px; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + width: 32px; + height: 32px; + + .ant-color-picker-color-block { + border-radius: 50px; + width: 16px; + height: 16px; + flex-shrink: 0; + + .ant-color-picker-color-block-inner { + display: flex; + justify-content: center; + align-items: center; + } + } + } + } + + .ant-modal-footer { + display: flex; + justify-content: flex-end; + padding: 16px 16px; + margin: 0; + + > button { + display: flex; + align-items: center; + border-radius: 2px; + background-color: var(--bg-robin-500) !important; + color: var(--bg-vanilla-100) !important; + font-family: Inter; + font-size: 12px; + font-style: normal; + font-weight: 500; + line-height: 24px; + } + } + } + .title { + color: 
var(--bg-vanilla-100); + font-family: Inter; + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: 20px; + } +} + +.lightMode { + .explorer-options { + background: transparent; + box-shadow: none; + border: 1px solid var(--bg-vanilla-300); + backdrop-filter: blur(20px); + + hr { + border-color: var(--bg-vanilla-300); + } + + .view-options, + .actions { + button { + border: 1px solid var(--bg-vanilla-300); + color: var(--bg-ink-200); + background-color: var(--bg-vanilla-300); + } + + .info-icon { + color: var(--bg-ink-200); + } + } + } + + .render-options { + color: var(--bg-ink-200); + } + + .explorer-update { + border: 1px solid var(--bg-vanilla-300); + background: transparent; + box-shadow: 4px 4px 16px 4px rgba(255, 255, 255, 0.55); + backdrop-filter: blur(20px); + + .action-icon { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-300); + } + + .ant-divider { + border-color: var(--bg-vanilla-300); + } + } + + .ant-tooltip-arrow { + border-top-color: var(--bg-vanilla-300) !important; + } + + .ant-tooltip-inner { + background-color: var(--bg-vanilla-300); + color: var(--bg-ink-200); + } + + .save-view-modal { + .ant-modal-content { + background: var(--bg-vanilla-200); + border-color: var(--bg-vanilla-300); + + .ant-modal-header { + background: var(--bg-vanilla-200); + border-bottom: 1px solid var(--bg-vanilla-300); + } + + .ant-modal-body { + .ant-typography { + color: var(--bg-ink-200); + } + + .ant-color-picker-trigger { + border: 1px solid var(--bg-vanilla-300); + background: var(--bg-vanilla-100); + + .ant-color-picker-color-block { + .ant-color-picker-color-block-inner { + svg { + fill: var(--bg-ink-200); + } + } + } + } + } + } + + .title { + color: var(--bg-ink-200); + } + } +} diff --git a/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx new file mode 100644 index 0000000..1386940 --- /dev/null +++ 
b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx @@ -0,0 +1,567 @@ +/* eslint-disable react/jsx-props-no-spreading */ +import './ExplorerOptions.styles.scss'; + +import { InfoCircleOutlined } from '@ant-design/icons'; +import { Color } from '@signozhq/design-tokens'; +import { + Button, + ColorPicker, + Divider, + Input, + Modal, + RefSelectProps, + Select, + Tooltip, + Typography, +} from 'antd'; +import logEvent from 'api/common/logEvent'; +import axios from 'axios'; +import cx from 'classnames'; +import { getViewDetailsUsingViewKey } from 'components/ExplorerCard/utils'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { QueryParams } from 'constants/query'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import ROUTES from 'constants/routes'; +import ExportPanelContainer from 'container/ExportPanel/ExportPanelContainer'; +import { useGetSearchQueryParam } from 'hooks/queryBuilder/useGetSearchQueryParam'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useGetAllViews } from 'hooks/saveViews/useGetAllViews'; +import { useSaveView } from 'hooks/saveViews/useSaveView'; +import { useUpdateView } from 'hooks/saveViews/useUpdateView'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import useErrorNotification from 'hooks/useErrorNotification'; +import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange'; +import { useNotifications } from 'hooks/useNotifications'; +import { mapCompositeQueryFromQuery } from 'lib/newQueryBuilder/queryBuilderMappers/mapCompositeQueryFromQuery'; +import { + Check, + ConciergeBell, + Disc3, + PanelBottomClose, + Plus, + X, + XCircle, +} from 'lucide-react'; +import { + CSSProperties, + Dispatch, + SetStateAction, + useCallback, + useMemo, + useRef, + useState, +} from 'react'; +import { useSelector } from 'react-redux'; +import { useHistory } from 'react-router-dom'; +import { AppState } from 'store/reducers'; +import { Dashboard } from 
'types/api/dashboard/getAll'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { DataSource } from 'types/common/queryBuilder'; +import AppReducer from 'types/reducer/app'; +import { USER_ROLES } from 'types/roles'; + +import ExplorerOptionsHideArea from './ExplorerOptionsHideArea'; +import { + DATASOURCE_VS_ROUTES, + generateRGBAFromHex, + getRandomColor, + saveNewViewHandler, + setExplorerToolBarVisibility, +} from './utils'; + +const allowedRoles = [USER_ROLES.ADMIN, USER_ROLES.AUTHOR, USER_ROLES.EDITOR]; + +// eslint-disable-next-line sonarjs/cognitive-complexity +function ExplorerOptions({ + disabled, + isLoading, + onExport, + query, + sourcepage, + isExplorerOptionHidden = false, + setIsExplorerOptionHidden, +}: ExplorerOptionsProps): JSX.Element { + const [isExport, setIsExport] = useState(false); + const [isSaveModalOpen, setIsSaveModalOpen] = useState(false); + const [newViewName, setNewViewName] = useState(''); + const [color, setColor] = useState(Color.BG_SIENNA_500); + const { notifications } = useNotifications(); + const history = useHistory(); + const ref = useRef(null); + const isDarkMode = useIsDarkMode(); + + const onModalToggle = useCallback((value: boolean) => { + setIsExport(value); + }, []); + + const { + currentQuery, + panelType, + isStagedQueryUpdated, + redirectWithQueryBuilderData, + } = useQueryBuilder(); + + const handleSaveViewModalToggle = (): void => { + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Save view clicked', { + panelType, + }); + } else if (sourcepage === DataSource.LOGS) { + logEvent('Logs Explorer: Save view clicked', { + panelType, + }); + } + setIsSaveModalOpen(!isSaveModalOpen); + }; + + const hideSaveViewModal = (): void => { + setIsSaveModalOpen(false); + }; + + const { role } = useSelector((state) => state.app); + + const onCreateAlertsHandler = useCallback(() => { + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Create alert', { + panelType, + 
}); + } else if (sourcepage === DataSource.LOGS) { + logEvent('Logs Explorer: Create alert', { + panelType, + }); + } + history.push( + `${ROUTES.ALERTS_NEW}?${QueryParams.compositeQuery}=${encodeURIComponent( + JSON.stringify(query), + )}`, + ); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [history, query]); + + const onCancel = (value: boolean) => (): void => { + onModalToggle(value); + }; + + const onAddToDashboard = (): void => { + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Add to dashboard clicked', { + panelType, + }); + } else if (sourcepage === DataSource.LOGS) { + logEvent('Logs Explorer: Add to dashboard clicked', { + panelType, + }); + } + setIsExport(true); + }; + + const { + data: viewsData, + isLoading: viewsIsLoading, + error, + isRefetching, + refetch: refetchAllView, + } = useGetAllViews(sourcepage); + + const compositeQuery = mapCompositeQueryFromQuery(currentQuery, panelType); + + const viewName = useGetSearchQueryParam(QueryParams.viewName) || ''; + const viewKey = useGetSearchQueryParam(QueryParams.viewKey) || ''; + + const extraData = viewsData?.data?.data?.find((view) => view.uuid === viewKey) + ?.extraData; + + const extraDataColor = extraData ? JSON.parse(extraData).color : ''; + const rgbaColor = generateRGBAFromHex( + extraDataColor || Color.BG_SIENNA_500, + 0.08, + ); + + const { + mutateAsync: updateViewAsync, + isLoading: isViewUpdating, + } = useUpdateView({ + compositeQuery, + viewKey, + extraData: extraData || JSON.stringify({ color: Color.BG_SIENNA_500 }), + sourcePage: sourcepage, + viewName, + }); + + const showErrorNotification = (err: Error): void => { + notifications.error({ + message: axios.isAxiosError(err) ? 
err.message : SOMETHING_WENT_WRONG, + }); + }; + + const onUpdateQueryHandler = (): void => { + const extraData = viewsData?.data?.data?.find((view) => view.uuid === viewKey) + ?.extraData; + updateViewAsync( + { + compositeQuery: mapCompositeQueryFromQuery(currentQuery, panelType), + viewKey, + extraData: extraData || JSON.stringify({ color: Color.BG_SIENNA_500 }), + sourcePage: sourcepage, + viewName, + }, + { + onSuccess: () => { + notifications.success({ + message: 'View Updated Successfully', + }); + refetchAllView(); + }, + onError: (err) => { + showErrorNotification(err); + }, + }, + ); + }; + + useErrorNotification(error); + + const { handleExplorerTabChange } = useHandleExplorerTabChange(); + + const onMenuItemSelectHandler = useCallback( + ({ key }: { key: string }): void => { + const currentViewDetails = getViewDetailsUsingViewKey( + key, + viewsData?.data?.data, + ); + if (!currentViewDetails) return; + const { + query, + name, + uuid, + panelType: currentPanelType, + } = currentViewDetails; + + handleExplorerTabChange(currentPanelType, { + query, + name, + uuid, + }); + }, + [viewsData, handleExplorerTabChange], + ); + + const handleSelect = ( + value: string, + option: { key: string; value: string }, + ): void => { + onMenuItemSelectHandler({ + key: option.key, + }); + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Select view', { + panelType, + viewName: option?.value, + }); + } else if (sourcepage === DataSource.LOGS) { + logEvent('Logs Explorer: Select view', { + panelType, + viewName: option?.value, + }); + } + if (ref.current) { + ref.current.blur(); + } + }; + + const handleClearSelect = (): void => { + history.replace(DATASOURCE_VS_ROUTES[sourcepage]); + }; + + const isQueryUpdated = isStagedQueryUpdated(viewsData?.data?.data, viewKey); + + const { + isLoading: isSaveViewLoading, + mutateAsync: saveViewAsync, + } = useSaveView({ + viewName: newViewName || '', + compositeQuery, + sourcePage: sourcepage, + extraData: 
JSON.stringify({ color }), + }); + + const onSaveHandler = (): void => { + saveNewViewHandler({ + compositeQuery, + handlePopOverClose: hideSaveViewModal, + extraData: JSON.stringify({ color }), + notifications, + panelType: panelType || PANEL_TYPES.LIST, + redirectWithQueryBuilderData, + refetchAllView, + saveViewAsync, + sourcePage: sourcepage, + viewName: newViewName, + setNewViewName, + }); + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Save view successful', { + panelType, + viewName: newViewName, + }); + } else if (sourcepage === DataSource.LOGS) { + logEvent('Logs Explorer: Save view successful', { + panelType, + viewName: newViewName, + }); + } + }; + + // TODO: Remove this and move this to scss file + const dropdownStyle: CSSProperties = useMemo( + () => ({ + borderRadius: '4px', + border: isDarkMode + ? `1px solid ${Color.BG_SLATE_400}` + : `1px solid ${Color.BG_VANILLA_300}`, + background: isDarkMode + ? 'linear-gradient(139deg, rgba(18, 19, 23, 0.80) 0%, rgba(18, 19, 23, 0.90) 98.68%)' + : 'linear-gradient(139deg, rgba(241, 241, 241, 0.8) 0%, rgba(241, 241, 241, 0.9) 98.68%)', + boxShadow: '4px 10px 16px 2px rgba(0, 0, 0, 0.20)', + backdropFilter: 'blur(20px)', + bottom: '74px', + width: '191px', + }), + [isDarkMode], + ); + + const hideToolbar = (): void => { + setExplorerToolBarVisibility(false, sourcepage); + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(true); + } + }; + + const isEditDeleteSupported = allowedRoles.includes(role as string); + + return ( +
+ {isQueryUpdated && !isExplorerOptionHidden && ( +
+ +
+ )} + {!isExplorerOptionHidden && ( +
+
+ + showSearch + placeholder="Select a view" + loading={viewsIsLoading || isRefetching} + value={viewName || undefined} + onSelect={handleSelect} + style={{ + minWidth: 170, + }} + dropdownStyle={dropdownStyle} + className="views-dropdown" + allowClear={{ + clearIcon: , + }} + onClear={handleClearSelect} + ref={ref} + > + {viewsData?.data?.data?.map((view) => { + const extraData = + view.extraData !== '' ? JSON.parse(view.extraData) : ''; + let bgColor = getRandomColor(); + if (extraData !== '') { + bgColor = extraData.color; + } + return ( + +
+ {' '} + {view.name} +
+
+ ); + })} + + + +
+ +
+ +
+ + + +
+
+ + {sourcepage === DataSource.LOGS + ? 'Learn more about Logs explorer ' + : 'Learn more about Traces explorer '} + + {' '} + here + {' '} +
+ } + > + + + +
+
+ )} + + + + Save this view} + open={isSaveModalOpen} + closable + onCancel={hideSaveViewModal} + footer={[ + , + ]} + > + Label +
+ setColor(hex)} + /> + setNewViewName(e.target.value)} + /> +
+
+ + + + + + ); +} + +export interface ExplorerOptionsProps { + isLoading?: boolean; + onExport: (dashboard: Dashboard | null, isNewDashboard?: boolean) => void; + query: Query | null; + disabled: boolean; + sourcepage: DataSource; + isExplorerOptionHidden?: boolean; + setIsExplorerOptionHidden?: Dispatch>; +} + +ExplorerOptions.defaultProps = { + isLoading: false, + isExplorerOptionHidden: false, + setIsExplorerOptionHidden: undefined, +}; + +export default ExplorerOptions; diff --git a/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss new file mode 100644 index 0000000..e45b9e8 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.styles.scss @@ -0,0 +1,55 @@ +.explorer-option-droppable-container { + position: fixed; + bottom: 0; + width: -webkit-fill-available; + height: 24px; + display: flex; + justify-content: center; + border-radius: 10px 10px 0px 0px; + // box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + // backdrop-filter: blur(20px); + + .explorer-actions-btn { + display: flex; + gap: 8px; + margin-right: 8px; + + .action-btn { + display: flex; + justify-content: center; + align-items: center; + border-radius: 10px 10px 0px 0px; + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + height: 24px !important; + border: none; + } + } + + .explorer-show-btn { + border-radius: 10px 10px 0px 0px; + border: 1px solid var(--bg-slate-400); + background: rgba(22, 24, 29, 0.40); + box-shadow: 0px 4px 16px 0px rgba(0, 0, 0, 0.25); + backdrop-filter: blur(20px); + align-self: center; + padding: 8px 12px; + height: 24px !important; + + .menu-bar { + border-radius: 50px; + background: var(--bg-slate-200); + height: 4px; + width: 50px; + } + } +} + +.lightMode { + .explorer-option-droppable-container { + + .explorer-show-btn { + background: var(--bg-vanilla-200); + } + } +} \ No newline at 
end of file diff --git a/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx new file mode 100644 index 0000000..a420c25 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/ExplorerOptionsHideArea.tsx @@ -0,0 +1,82 @@ +/* eslint-disable no-nested-ternary */ +import './ExplorerOptionsHideArea.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Button, Tooltip } from 'antd'; +import { Disc3, X } from 'lucide-react'; +import { Dispatch, SetStateAction } from 'react'; +import { DataSource } from 'types/common/queryBuilder'; + +import { setExplorerToolBarVisibility } from './utils'; + +interface DroppableAreaProps { + isQueryUpdated: boolean; + isExplorerOptionHidden?: boolean; + sourcepage: DataSource; + setIsExplorerOptionHidden?: Dispatch>; + handleClearSelect: () => void; + onUpdateQueryHandler: () => void; + isEditDeleteSupported: boolean; +} + +function ExplorerOptionsHideArea({ + isQueryUpdated, + isExplorerOptionHidden, + sourcepage, + setIsExplorerOptionHidden, + handleClearSelect, + onUpdateQueryHandler, + isEditDeleteSupported, +}: DroppableAreaProps): JSX.Element { + const handleShowExplorerOption = (): void => { + if (setIsExplorerOptionHidden) { + setIsExplorerOptionHidden(false); + setExplorerToolBarVisibility(true, sourcepage); + } + }; + + return ( +
+ {isExplorerOptionHidden && ( + <> + {isQueryUpdated && ( +
+ +
+ )} + + + )} +
+ ); +} + +ExplorerOptionsHideArea.defaultProps = { + isExplorerOptionHidden: undefined, + setIsExplorerOptionHidden: undefined, +}; + +export default ExplorerOptionsHideArea; diff --git a/signoz/frontend/src/container/ExplorerOptions/types.ts b/signoz/frontend/src/container/ExplorerOptions/types.ts new file mode 100644 index 0000000..398fe0d --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/types.ts @@ -0,0 +1,28 @@ +import { NotificationInstance } from 'antd/es/notification/interface'; +import { AxiosResponse } from 'axios'; +import { SaveViewWithNameProps } from 'components/ExplorerCard/types'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { Dispatch, SetStateAction } from 'react'; +import { UseMutateAsyncFunction } from 'react-query'; +import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery'; +import { SaveViewPayloadProps, SaveViewProps } from 'types/api/saveViews/types'; +import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder'; + +export interface SaveNewViewHandlerProps { + viewName: string; + compositeQuery: ICompositeMetricQuery; + sourcePage: DataSource; + extraData: SaveViewProps['extraData']; + panelType: PANEL_TYPES | null; + notifications: NotificationInstance; + refetchAllView: SaveViewWithNameProps['refetchAllView']; + saveViewAsync: UseMutateAsyncFunction< + AxiosResponse, + Error, + SaveViewProps, + SaveViewPayloadProps + >; + handlePopOverClose: SaveViewWithNameProps['handlePopOverClose']; + redirectWithQueryBuilderData: QueryBuilderContextType['redirectWithQueryBuilderData']; + setNewViewName: Dispatch>; +} diff --git a/signoz/frontend/src/container/ExplorerOptions/utils.ts b/signoz/frontend/src/container/ExplorerOptions/utils.ts new file mode 100644 index 0000000..d94e641 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOptions/utils.ts @@ -0,0 +1,121 @@ +import { Color } from '@signozhq/design-tokens'; +import { showErrorNotification } from 
'components/ExplorerCard/utils'; +import { LOCALSTORAGE } from 'constants/localStorage'; +import { QueryParams } from 'constants/query'; +import ROUTES from 'constants/routes'; +import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; +import { DataSource } from 'types/common/queryBuilder'; + +import { SaveNewViewHandlerProps } from './types'; + +export const getRandomColor = (): Color => { + const colorKeys = Object.keys(Color) as (keyof typeof Color)[]; + const randomKey = colorKeys[Math.floor(Math.random() * colorKeys.length)]; + return Color[randomKey]; +}; + +export const DATASOURCE_VS_ROUTES: Record = { + [DataSource.METRICS]: '', + [DataSource.TRACES]: ROUTES.TRACES_EXPLORER, + [DataSource.LOGS]: ROUTES.LOGS_EXPLORER, +}; + +export const saveNewViewHandler = ({ + saveViewAsync, + refetchAllView, + notifications, + handlePopOverClose, + viewName, + compositeQuery, + sourcePage, + extraData, + redirectWithQueryBuilderData, + panelType, + setNewViewName, +}: SaveNewViewHandlerProps): void => { + saveViewAsync( + { + viewName, + compositeQuery, + sourcePage, + extraData, + }, + { + onSuccess: (data) => { + refetchAllView(); + redirectWithQueryBuilderData(mapQueryDataFromApi(compositeQuery), { + [QueryParams.panelTypes]: panelType, + [QueryParams.viewName]: viewName, + [QueryParams.viewKey]: data.data.data, + }); + notifications.success({ + message: 'View Saved Successfully', + }); + }, + onError: (err) => { + showErrorNotification(notifications, err); + }, + onSettled: () => { + handlePopOverClose(); + setNewViewName(''); + }, + }, + ); +}; + +export const generateRGBAFromHex = (hex: string, opacity: number): string => + `rgba(${parseInt(hex.slice(1, 3), 16)}, ${parseInt( + hex.slice(3, 5), + 16, + )}, ${parseInt(hex.slice(5, 7), 16)}, ${opacity})`; + +export const getExplorerToolBarVisibility = (dataSource: string): boolean => { + try { + const showExplorerToolbar = localStorage.getItem( + 
LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar === null) { + const parsedShowExplorerToolbar: { + [DataSource.LOGS]: boolean; + [DataSource.TRACES]: boolean; + [DataSource.METRICS]: boolean; + } = { + [DataSource.METRICS]: true, + [DataSource.TRACES]: true, + [DataSource.LOGS]: true, + }; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return true; + } + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar || '{}'); + return parsedShowExplorerToolbar[dataSource]; + } catch (error) { + console.error(error); + return false; + } +}; + +export const setExplorerToolBarVisibility = ( + value: boolean, + dataSource: string, +): void => { + try { + const showExplorerToolbar = localStorage.getItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + ); + if (showExplorerToolbar) { + const parsedShowExplorerToolbar = JSON.parse(showExplorerToolbar); + parsedShowExplorerToolbar[dataSource] = value; + localStorage.setItem( + LOCALSTORAGE.SHOW_EXPLORER_TOOLBAR, + JSON.stringify(parsedShowExplorerToolbar), + ); + return; + } + } catch (error) { + console.error(error); + } +}; diff --git a/signoz/frontend/src/container/ExplorerOrderBy/index.tsx b/signoz/frontend/src/container/ExplorerOrderBy/index.tsx new file mode 100644 index 0000000..54747f5 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerOrderBy/index.tsx @@ -0,0 +1,73 @@ +import { Select, Spin } from 'antd'; +import { OrderByFilterProps } from 'container/QueryBuilder/filters/OrderByFilter/OrderByFilter.interfaces'; +import { useOrderByFilter } from 'container/QueryBuilder/filters/OrderByFilter/useOrderByFilter'; +import { selectStyle } from 'container/QueryBuilder/filters/QueryBuilderSearch/config'; +import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys'; +import { memo, useMemo } from 'react'; +import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; +import { StringOperators } from 
'types/common/queryBuilder'; + +function ExplorerOrderBy({ query, onChange }: OrderByFilterProps): JSX.Element { + const { + debouncedSearchText, + selectedValue, + aggregationOptions, + generateOptions, + createOptions, + handleChange, + handleSearchKeys, + } = useOrderByFilter({ query, onChange }); + + const { data, isFetching } = useGetAggregateKeys( + { + aggregateAttribute: query.aggregateAttribute.key, + dataSource: query.dataSource, + aggregateOperator: query.aggregateOperator, + searchText: debouncedSearchText, + }, + { + keepPreviousData: true, + }, + ); + + const options = useMemo(() => { + const keysOptions = createOptions(data?.payload?.attributeKeys || []); + + const customOptions = createOptions([ + { key: 'timestamp', isColumn: true, type: '', dataType: DataTypes.EMPTY }, + ]); + + const baseOptions = [ + ...customOptions, + ...(query.aggregateOperator === StringOperators.NOOP + ? [] + : aggregationOptions), + ...keysOptions, + ]; + + return generateOptions(baseOptions); + }, [ + aggregationOptions, + createOptions, + data?.payload?.attributeKeys, + generateOptions, + query.aggregateOperator, + ]); + + return ( + + + + {/* +
; + } + + return ( + + {/* eslint-disable-next-line react/jsx-props-no-spreading */} + + + ); +} + +interface ResizableHeaderProps { + onResize: (e: SyntheticEvent, data: ResizeCallbackData) => void; + width: number; +} + +export default ResizableHeader; diff --git a/signoz/frontend/src/components/ResizeTable/ResizeTable.tsx b/signoz/frontend/src/components/ResizeTable/ResizeTable.tsx new file mode 100644 index 0000000..5f8ac7a --- /dev/null +++ b/signoz/frontend/src/components/ResizeTable/ResizeTable.tsx @@ -0,0 +1,98 @@ +/* eslint-disable react/jsx-props-no-spreading */ + +import { Table } from 'antd'; +import { ColumnsType } from 'antd/lib/table'; +import { dragColumnParams } from 'hooks/useDragColumns/configs'; +import { set } from 'lodash-es'; +import { + SyntheticEvent, + useCallback, + useEffect, + useMemo, + useState, +} from 'react'; +import ReactDragListView from 'react-drag-listview'; +import { ResizeCallbackData } from 'react-resizable'; + +import ResizableHeader from './ResizableHeader'; +import { DragSpanStyle } from './styles'; +import { ResizeTableProps } from './types'; + +function ResizeTable({ + columns, + onDragColumn, + pagination, + ...restProps +}: ResizeTableProps): JSX.Element { + const [columnsData, setColumns] = useState([]); + + const handleResize = useCallback( + (index: number) => ( + _e: SyntheticEvent, + { size }: ResizeCallbackData, + ): void => { + const newColumns = [...columnsData]; + newColumns[index] = { + ...newColumns[index], + width: size.width, + }; + setColumns(newColumns); + }, + [columnsData], + ); + + const mergedColumns = useMemo( + () => + columnsData.map((col, index) => ({ + ...col, + ...(onDragColumn && { + title: ( + + {col?.title?.toString() || ''} + + ), + }), + onHeaderCell: (column: ColumnsType[number]): unknown => ({ + width: column.width, + onResize: handleResize(index), + }), + })) as ColumnsType, + [columnsData, onDragColumn, handleResize], + ); + + const tableParams = useMemo(() => { + const props = { + 
...restProps, + components: { header: { cell: ResizableHeader } }, + columns: mergedColumns, + }; + + set( + props, + 'pagination', + pagination ? { ...pagination, hideOnSinglePage: true } : false, + ); + + return props; + }, [mergedColumns, pagination, restProps]); + + useEffect(() => { + if (columns) { + setColumns(columns); + } + }, [columns]); + + return onDragColumn ? ( + + + + ) : ( +
+ ); +} + +ResizeTable.defaultProps = { + onDragColumn: undefined, +}; + +export default ResizeTable; diff --git a/signoz/frontend/src/components/ResizeTable/TableComponent/DateComponent.tsx b/signoz/frontend/src/components/ResizeTable/TableComponent/DateComponent.tsx new file mode 100644 index 0000000..87c5c57 --- /dev/null +++ b/signoz/frontend/src/components/ResizeTable/TableComponent/DateComponent.tsx @@ -0,0 +1,15 @@ +import { Typography } from 'antd'; + +import Time from './Time'; + +function DateComponent( + CreatedOrUpdateTime: string | number | Date, +): JSX.Element { + if (CreatedOrUpdateTime === null) { + return - ; + } + + return Creator + + + {APIKey?.createdByUser?.name?.substring(0, 1)} + + + {APIKey.createdByUser?.name} + +
{APIKey.createdByUser?.email}
+ + + )} + + Created on + + {createdOn} + + + {updatedOn && ( + + Updated on + + {updatedOn} + + + )} + + + Expires on + + {expiresOn} + + + + ), + }, + ]; + + return ( +
+ + +
+
+ + Last used + {formattedDateAndTime} +
+ + {!isExpired && expiresIn <= EXPIRATION_WITHIN_SEVEN_DAYS && ( +
+ Expires in {expiresIn} Days +
+ )} + + {isExpired && ( +
+ Expired +
+ )} +
+
+ ); + }, + }, + ]; + + return ( +
+
+
+ Access Tokens + + Create and manage access tokens for the SigNoz API + +
+ +
+ } + value={searchValue} + onChange={handleSearch} + /> + + +
+ +
+ `${range[0]}-${range[1]} of ${total} tokens`, + }} + /> + + + {/* Delete Key Modal */} + Delete Token} + open={isDeleteModalOpen} + closable + afterClose={handleModalClose} + onCancel={hideDeleteViewModal} + destroyOnClose + footer={[ + , + , + ]} + > + + {t('delete_confirm_message', { + keyName: activeAPIKey?.name, + })} + + + + {/* Edit Key Modal */} + } + > + Cancel + , + , + ]} + > +
+ + + + + + + + +
+ Admin +
+
+ +
+ Editor +
+
+ +
+ Viewer +
+
+
+
+
+ +
+ + {/* Create New Key Modal */} + } + > + Copy token and close + , + ] + : [ + , + , + ] + } + > + {!showNewAPIKeyDetails && ( +
+ + + + + + + + +
+ Admin +
+
+ +
+ Editor +
+
+ +
+ Viewer +
+
+
+
+
+ + setSelectedKeys(e.target.value ? [e.target.value] : []) + + // Need to fix this logic, when the value in empty, it's setting undefined string as value + } + allowClear + defaultValue={getDefaultFilterValue( + filterKey, + getUpdatedServiceName, + getUpdatedExceptionType, + )} + onPressEnter={handleSearch(confirm, String(selectedKeys[0]), filterKey)} + /> + + + + ), + [getUpdatedExceptionType, getUpdatedServiceName, handleSearch], + ); + + const onExceptionTypeFilter: ColumnType['onFilter'] = useCallback( + (value: unknown, record: Exception): boolean => { + if (record.exceptionType && typeof value === 'string') { + return record.exceptionType.toLowerCase().includes(value.toLowerCase()); + } + return false; + }, + [], + ); + + const onApplicationTypeFilter = useCallback( + (value: unknown, record: Exception): boolean => { + if (record.serviceName && typeof value === 'string') { + return record.serviceName.toLowerCase().includes(value.toLowerCase()); + } + return false; + }, + [], + ); + + const getFilter = useCallback( + ( + onFilter: ColumnType['onFilter'], + placeholder: string, + filterKey: string, + ): ColumnType => ({ + onFilter, + filterIcon, + filterDropdown: ({ confirm, selectedKeys, setSelectedKeys }): JSX.Element => + filterDropdownWrapper({ + setSelectedKeys, + selectedKeys, + confirm, + placeholder, + filterKey, + }), + }), + [filterIcon, filterDropdownWrapper], + ); + + const columns: ColumnsType = [ + { + title: 'Exception Type', + width: 100, + dataIndex: 'exceptionType', + key: 'exceptionType', + ...getFilter(onExceptionTypeFilter, 'Search By Exception', 'exceptionType'), + render: (value, record): JSX.Element => ( + value}> + + {value} + + + ), + sorter: true, + defaultSortOrder: getDefaultOrder( + getUpdatedParams, + updatedOrder, + 'exceptionType', + ), + }, + { + title: 'Error Message', + dataIndex: 'exceptionMessage', + key: 'exceptionMessage', + width: 100, + render: (value): JSX.Element => ( + value}> + + {value} + + + ), + }, + { + title: 
'Count', + width: 50, + dataIndex: 'exceptionCount', + key: 'exceptionCount', + sorter: true, + defaultSortOrder: getDefaultOrder( + getUpdatedParams, + updatedOrder, + 'exceptionCount', + ), + }, + { + title: 'Last Seen', + dataIndex: 'lastSeen', + width: 80, + key: 'lastSeen', + render: getDateValue, + sorter: true, + defaultSortOrder: getDefaultOrder( + getUpdatedParams, + updatedOrder, + 'lastSeen', + ), + }, + { + title: 'First Seen', + dataIndex: 'firstSeen', + width: 80, + key: 'firstSeen', + render: getDateValue, + sorter: true, + defaultSortOrder: getDefaultOrder( + getUpdatedParams, + updatedOrder, + 'firstSeen', + ), + }, + { + title: 'Application', + dataIndex: 'serviceName', + width: 100, + key: 'serviceName', + sorter: true, + defaultSortOrder: getDefaultOrder( + getUpdatedParams, + updatedOrder, + 'serviceName', + ), + ...getFilter( + onApplicationTypeFilter, + 'Search By Application', + 'serviceName', + ), + }, + ]; + + const onChangeHandler: TableProps['onChange'] = useCallback( + ( + paginations: TablePaginationConfig, + filters: Record, + sorter: SorterResult[] | SorterResult, + ) => { + if (!Array.isArray(sorter)) { + const { pageSize = 0, current = 0 } = paginations; + const { columnKey = '', order } = sorter; + const updatedOrder = order === 'ascend' ? 
'ascending' : 'descending'; + const params = new URLSearchParams(window.location.search); + const { exceptionType, serviceName } = extractFilterValues(filters, { + serviceName: getFilterString(params.get(urlKey.serviceName)), + exceptionType: getFilterString(params.get(urlKey.exceptionType)), + }); + history.replace( + `${pathname}?${createQueryParams({ + order: updatedOrder, + offset: (current - 1) * pageSize, + orderParam: columnKey, + pageSize, + exceptionType, + serviceName, + })}`, + ); + } + }, + [pathname], + ); + + const logEventCalledRef = useRef(false); + useEffect(() => { + if ( + !logEventCalledRef.current && + !isUndefined(errorCountResponse.data?.payload) + ) { + const selectedEnvironments = queries.find( + (val) => val.tagKey === 'resource_deployment_environment', + )?.tagValue; + + logEvent('Exception: List page visited', { + numberOfExceptions: errorCountResponse?.data?.payload, + selectedEnvironments, + resourceAttributeUsed: !!queries?.length, + }); + logEventCalledRef.current = true; + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [errorCountResponse.data?.payload]); + + return ( + + ); +} + +export default AllErrors; diff --git a/signoz/frontend/src/container/AllError/types.ts b/signoz/frontend/src/container/AllError/types.ts new file mode 100644 index 0000000..81ce367 --- /dev/null +++ b/signoz/frontend/src/container/AllError/types.ts @@ -0,0 +1,9 @@ +import { FilterDropdownProps } from 'antd/es/table/interface'; + +export interface FilterDropdownExtendsProps { + placeholder: string; + filterKey: string; + confirm: FilterDropdownProps['confirm']; + setSelectedKeys: FilterDropdownProps['setSelectedKeys']; + selectedKeys: FilterDropdownProps['selectedKeys']; +} diff --git a/signoz/frontend/src/container/AllError/utils.test.ts b/signoz/frontend/src/container/AllError/utils.test.ts new file mode 100644 index 0000000..344d318 --- /dev/null +++ b/signoz/frontend/src/container/AllError/utils.test.ts @@ -0,0 +1,109 @@ +import { 
Order, OrderBy } from 'types/api/errors/getAll'; + +import { + getDefaultOrder, + getLimit, + getOffSet, + getOrder, + getOrderParams, + getUpdatePageSize, + isOrder, + isOrderParams, +} from './utils'; + +describe('Error utils', () => { + test('Valid OrderBy Params', () => { + expect(isOrderParams('serviceName')).toBe(true); + expect(isOrderParams('exceptionCount')).toBe(true); + expect(isOrderParams('lastSeen')).toBe(true); + expect(isOrderParams('firstSeen')).toBe(true); + expect(isOrderParams('exceptionType')).toBe(true); + }); + + test('Invalid OrderBy Params', () => { + expect(isOrderParams('invalid')).toBe(false); + expect(isOrderParams(null)).toBe(false); + expect(isOrderParams('')).toBe(false); + }); + + test('Valid Order', () => { + expect(isOrder('ascending')).toBe(true); + expect(isOrder('descending')).toBe(true); + }); + + test('Invalid Order', () => { + expect(isOrder('invalid')).toBe(false); + expect(isOrder(null)).toBe(false); + expect(isOrder('')).toBe(false); + }); + + test('Default Order', () => { + const OrderBy: OrderBy[] = [ + 'exceptionCount', + 'exceptionType', + 'firstSeen', + 'lastSeen', + 'serviceName', + ]; + + const order: Order[] = ['ascending', 'descending']; + + const ascOrd = order[0]; + const desOrd = order[1]; + + OrderBy.forEach((order) => { + expect(getDefaultOrder(order, ascOrd, order)).toBe('ascend'); + expect(getDefaultOrder(order, desOrd, order)).toBe('descend'); + }); + }); + + test('Limit', () => { + expect(getLimit(null)).toBe(10); + expect(getLimit('')).toBe(10); + expect(getLimit('0')).toBe(0); + expect(getLimit('1')).toBe(1); + expect(getLimit('10')).toBe(10); + expect(getLimit('11')).toBe(11); + expect(getLimit('100')).toBe(100); + expect(getLimit('101')).toBe(101); + }); + + test('Update Page Size', () => { + expect(getUpdatePageSize(null)).toBe(10); + expect(getUpdatePageSize('')).toBe(10); + expect(getUpdatePageSize('0')).toBe(0); + expect(getUpdatePageSize('1')).toBe(1); + expect(getUpdatePageSize('10')).toBe(10); 
+ expect(getUpdatePageSize('11')).toBe(11); + expect(getUpdatePageSize('100')).toBe(100); + expect(getUpdatePageSize('101')).toBe(101); + }); + + test('Order Params', () => { + expect(getOrderParams(null)).toBe('serviceName'); + expect(getOrderParams('')).toBe('serviceName'); + expect(getOrderParams('serviceName')).toBe('serviceName'); + expect(getOrderParams('exceptionCount')).toBe('exceptionCount'); + expect(getOrderParams('lastSeen')).toBe('lastSeen'); + expect(getOrderParams('firstSeen')).toBe('firstSeen'); + expect(getOrderParams('exceptionType')).toBe('exceptionType'); + }); + + test('OffSet', () => { + expect(getOffSet(null)).toBe(0); + expect(getOffSet('')).toBe(0); + expect(getOffSet('0')).toBe(0); + expect(getOffSet('1')).toBe(1); + expect(getOffSet('10')).toBe(10); + expect(getOffSet('11')).toBe(11); + expect(getOffSet('100')).toBe(100); + expect(getOffSet('101')).toBe(101); + }); + + test('Order', () => { + expect(getOrder(null)).toBe('ascending'); + expect(getOrder('')).toBe('ascending'); + expect(getOrder('ascending')).toBe('ascending'); + expect(getOrder('descending')).toBe('descending'); + }); +}); diff --git a/signoz/frontend/src/container/AllError/utils.ts b/signoz/frontend/src/container/AllError/utils.ts new file mode 100644 index 0000000..c13cd8f --- /dev/null +++ b/signoz/frontend/src/container/AllError/utils.ts @@ -0,0 +1,184 @@ +import { FilterValue, SortOrder } from 'antd/lib/table/interface'; +import Timestamp from 'timestamp-nano'; +import { Order, OrderBy } from 'types/api/errors/getAll'; + +import { + DEFAULT_FILTER_VALUE, + EXCEPTION_TYPE_FILTER_NAME, + SERVICE_NAME_FILTER_NAME, +} from './constant'; + +export const isOrder = (order: string | null): order is Order => + !!(order === 'ascending' || order === 'descending'); + +export const urlKey = { + order: 'order', + offset: 'offset', + orderParam: 'orderParam', + pageSize: 'pageSize', + exceptionType: 'exceptionType', + serviceName: 'serviceName', +}; + +export const isOrderParams = 
(orderBy: string | null): orderBy is OrderBy => + !!( + orderBy === 'serviceName' || + orderBy === 'exceptionCount' || + orderBy === 'lastSeen' || + orderBy === 'firstSeen' || + orderBy === 'exceptionType' + ); + +export const getOrder = (order: string | null): Order => { + if (isOrder(order)) { + return order; + } + return 'ascending'; +}; + +export const getLimit = (limit: string | null): number => { + if (limit) { + return parseInt(limit, 10); + } + return 10; +}; + +export const getOffSet = (offset: string | null): number => { + if (offset && typeof offset === 'string') { + return parseInt(offset, 10); + } + return 0; +}; + +export const getOrderParams = (order: string | null): OrderBy => { + if (isOrderParams(order)) { + return order; + } + return 'serviceName'; +}; + +export const getDefaultOrder = ( + orderBy: OrderBy, + order: Order, + data: OrderBy, + // eslint-disable-next-line sonarjs/cognitive-complexity +): SortOrder | undefined => { + if (orderBy === 'exceptionType' && data === 'exceptionType') { + return order === 'ascending' ? 'ascend' : 'descend'; + } + if (orderBy === 'serviceName' && data === 'serviceName') { + return order === 'ascending' ? 'ascend' : 'descend'; + } + if (orderBy === 'exceptionCount' && data === 'exceptionCount') { + return order === 'ascending' ? 'ascend' : 'descend'; + } + if (orderBy === 'lastSeen' && data === 'lastSeen') { + return order === 'ascending' ? 'ascend' : 'descend'; + } + if (orderBy === 'firstSeen' && data === 'firstSeen') { + return order === 'ascending' ? 
'ascend' : 'descend'; + } + return undefined; +}; + +export const getNanoSeconds = (date: string): string => + Math.floor(new Date(date).getTime() / 1e3).toString() + + String(Timestamp.fromString(date).getNano().toString()).padStart(9, '0'); + +export const getUpdatePageSize = (pageSize: string | null): number => { + if (pageSize) { + return parseInt(pageSize, 10); + } + return 10; +}; + +export const getFilterString = (filter: string | null): string => { + if (filter) { + return filter; + } + return ''; +}; + +export const getDefaultFilterValue = ( + filterKey: string | null, + serviceName: string, + exceptionType: string, +): string | undefined => { + let defaultValue: string | undefined; + switch (filterKey) { + case SERVICE_NAME_FILTER_NAME: + defaultValue = serviceName; + break; + case EXCEPTION_TYPE_FILTER_NAME: + defaultValue = exceptionType; + break; + default: + break; + } + return defaultValue; +}; + +export const getFilterValues = ( + serviceName: string, + exceptionType: string, + filterKey: string, + filterValue: string, +): { exceptionFilterValue: string; serviceFilterValue: string } => { + let serviceFilterValue = serviceName; + let exceptionFilterValue = exceptionType; + switch (filterKey) { + case EXCEPTION_TYPE_FILTER_NAME: + exceptionFilterValue = filterValue; + break; + case SERVICE_NAME_FILTER_NAME: + serviceFilterValue = filterValue; + break; + default: + break; + } + return { exceptionFilterValue, serviceFilterValue }; +}; + +type FilterValues = { exceptionType: string; serviceName: string }; + +const extractSingleFilterValue = ( + filterName: string, + filters: Filter, +): string => { + const filterValues = filters[filterName]; + + if ( + !filterValues || + !Array.isArray(filterValues) || + filterValues.length === 0 + ) { + return DEFAULT_FILTER_VALUE; + } + + return String(filterValues[0]); +}; + +type Filter = Record; + +export const extractFilterValues = ( + filters: Filter, + prefilledFilters: FilterValues, +): FilterValues => { + const 
filterValues: FilterValues = { + exceptionType: prefilledFilters.exceptionType, + serviceName: prefilledFilters.serviceName, + }; + if (filters[EXCEPTION_TYPE_FILTER_NAME]) { + filterValues.exceptionType = extractSingleFilterValue( + EXCEPTION_TYPE_FILTER_NAME, + filters, + ); + } + if (filters[SERVICE_NAME_FILTER_NAME]) { + filterValues.serviceName = extractSingleFilterValue( + SERVICE_NAME_FILTER_NAME, + filters, + ); + } + return filterValues; +}; diff --git a/signoz/frontend/src/container/AppLayout/AppLayout.styles.scss b/signoz/frontend/src/container/AppLayout/AppLayout.styles.scss new file mode 100644 index 0000000..c789ba5 --- /dev/null +++ b/signoz/frontend/src/container/AppLayout/AppLayout.styles.scss @@ -0,0 +1,67 @@ +.app-layout { + position: relative; + height: 100%; + width: 100%; + + .app-content { + width: calc(100% - 64px); + z-index: 0; + + .content-container { + position: relative; + margin: 0 1rem; + display: flex; + flex-direction: column; + height: 100%; + width: 100%; + } + } + + &.docked { + .app-content { + width: calc(100% - 240px); + } + } +} + +.isDarkMode { + .app-layout { + .app-content { + background: #0b0c0e; + } + } +} + +.isLightMode { + .app-layout { + .app-content { + background: #ffffff; + } + } +} + +.trial-expiry-banner { + padding: 8px; + background-color: #f25733; + color: white; + text-align: center; +} + +.upgrade-link { + padding: 0px; + padding-right: 4px; + display: inline !important; + color: white; + text-decoration: underline; + text-decoration-color: white; + text-decoration-thickness: 2px; + text-underline-offset: 2px; + + &:hover { + color: white; + text-decoration: underline; + text-decoration-color: white; + text-decoration-thickness: 2px; + text-underline-offset: 2px; + } +} diff --git a/signoz/frontend/src/container/AppLayout/index.tsx b/signoz/frontend/src/container/AppLayout/index.tsx new file mode 100644 index 0000000..ba64e9a --- /dev/null +++ b/signoz/frontend/src/container/AppLayout/index.tsx @@ -0,0 
+1,342 @@ +/* eslint-disable jsx-a11y/no-static-element-interactions */ +/* eslint-disable jsx-a11y/click-events-have-key-events */ +/* eslint-disable jsx-a11y/anchor-is-valid */ +import './AppLayout.styles.scss'; + +import * as Sentry from '@sentry/react'; +import { Flex } from 'antd'; +import getLocalStorageKey from 'api/browser/localstorage/get'; +import getUserLatestVersion from 'api/user/getLatestVersion'; +import getUserVersion from 'api/user/getVersion'; +import cx from 'classnames'; +import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; +import { IS_SIDEBAR_COLLAPSED } from 'constants/app'; +import ROUTES from 'constants/routes'; +import SideNav from 'container/SideNav'; +import TopNav from 'container/TopNav'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import useLicense from 'hooks/useLicense'; +import { useNotifications } from 'hooks/useNotifications'; +import history from 'lib/history'; +import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; +import { + ReactNode, + useCallback, + useEffect, + useLayoutEffect, + useMemo, + useRef, + useState, +} from 'react'; +import { Helmet } from 'react-helmet-async'; +import { useTranslation } from 'react-i18next'; +import { useQueries } from 'react-query'; +import { useDispatch, useSelector } from 'react-redux'; +import { useLocation } from 'react-router-dom'; +import { Dispatch } from 'redux'; +import { sideBarCollapse } from 'store/actions'; +import { AppState } from 'store/reducers'; +import AppActions from 'types/actions'; +import { + UPDATE_CURRENT_ERROR, + UPDATE_CURRENT_VERSION, + UPDATE_LATEST_VERSION, + UPDATE_LATEST_VERSION_ERROR, +} from 'types/actions/app'; +import AppReducer from 'types/reducer/app'; +import { getFormattedDate, getRemainingDays } from 'utils/timeUtils'; + +import { ChildrenContainer, Layout, LayoutContent } from './styles'; +import { getRouteKey } from './utils'; + +function AppLayout(props: AppLayoutProps): JSX.Element { + 
const { isLoggedIn, user, role } = useSelector( + (state) => state.app, + ); + + const [collapsed, setCollapsed] = useState( + getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true', + ); + + const isDarkMode = useIsDarkMode(); + + const { data: licenseData, isFetching } = useLicense(); + + const { pathname } = useLocation(); + const { t } = useTranslation(['titles']); + + const [getUserVersionResponse, getUserLatestVersionResponse] = useQueries([ + { + queryFn: getUserVersion, + queryKey: ['getUserVersion', user?.accessJwt], + enabled: isLoggedIn, + }, + { + queryFn: getUserLatestVersion, + queryKey: ['getUserLatestVersion', user?.accessJwt], + enabled: isLoggedIn, + }, + ]); + + useEffect(() => { + if (getUserLatestVersionResponse.status === 'idle' && isLoggedIn) { + getUserLatestVersionResponse.refetch(); + } + + if (getUserVersionResponse.status === 'idle' && isLoggedIn) { + getUserVersionResponse.refetch(); + } + }, [getUserLatestVersionResponse, getUserVersionResponse, isLoggedIn]); + + const { children } = props; + + const dispatch = useDispatch>(); + + const latestCurrentCounter = useRef(0); + const latestVersionCounter = useRef(0); + + const { notifications } = useNotifications(); + + const onCollapse = useCallback(() => { + setCollapsed((collapsed) => !collapsed); + }, []); + + useLayoutEffect(() => { + dispatch(sideBarCollapse(collapsed)); + }, [collapsed, dispatch]); + + useEffect(() => { + if ( + getUserLatestVersionResponse.isFetched && + getUserLatestVersionResponse.isError && + latestCurrentCounter.current === 0 + ) { + latestCurrentCounter.current = 1; + + dispatch({ + type: UPDATE_LATEST_VERSION_ERROR, + payload: { + isError: true, + }, + }); + notifications.error({ + message: t('oops_something_went_wrong_version'), + }); + } + + if ( + getUserVersionResponse.isFetched && + getUserVersionResponse.isError && + latestVersionCounter.current === 0 + ) { + latestVersionCounter.current = 1; + + dispatch({ + type: UPDATE_CURRENT_ERROR, + payload: { + 
isError: true, + }, + }); + notifications.error({ + message: t('oops_something_went_wrong_version'), + }); + } + + if ( + getUserVersionResponse.isFetched && + getUserLatestVersionResponse.isSuccess && + getUserVersionResponse.data && + getUserVersionResponse.data.payload + ) { + dispatch({ + type: UPDATE_CURRENT_VERSION, + payload: { + currentVersion: getUserVersionResponse.data.payload.version, + ee: getUserVersionResponse.data.payload.ee, + setupCompleted: getUserVersionResponse.data.payload.setupCompleted, + }, + }); + } + + if ( + getUserLatestVersionResponse.isFetched && + getUserLatestVersionResponse.isSuccess && + getUserLatestVersionResponse.data && + getUserLatestVersionResponse.data.payload + ) { + dispatch({ + type: UPDATE_LATEST_VERSION, + payload: { + latestVersion: getUserLatestVersionResponse.data.payload.tag_name, + }, + }); + } + }, [ + dispatch, + isLoggedIn, + pathname, + t, + getUserLatestVersionResponse.isLoading, + getUserLatestVersionResponse.isError, + getUserLatestVersionResponse.data, + getUserVersionResponse.isLoading, + getUserVersionResponse.isError, + getUserVersionResponse.data, + getUserLatestVersionResponse.isFetched, + getUserVersionResponse.isFetched, + getUserLatestVersionResponse.isSuccess, + notifications, + ]); + + const isToDisplayLayout = isLoggedIn; + + const routeKey = useMemo(() => getRouteKey(pathname), [pathname]); + const pageTitle = t(routeKey); + const renderFullScreen = + pathname === ROUTES.GET_STARTED || + pathname === ROUTES.WORKSPACE_LOCKED || + pathname === ROUTES.GET_STARTED_APPLICATION_MONITORING || + pathname === ROUTES.GET_STARTED_INFRASTRUCTURE_MONITORING || + pathname === ROUTES.GET_STARTED_LOGS_MANAGEMENT || + pathname === ROUTES.GET_STARTED_AWS_MONITORING || + pathname === ROUTES.GET_STARTED_AZURE_MONITORING; + + const [showTrialExpiryBanner, setShowTrialExpiryBanner] = useState(false); + + useEffect(() => { + if ( + !isFetching && + licenseData?.payload?.onTrial && + 
!licenseData?.payload?.trialConvertedToSubscription && + !licenseData?.payload?.workSpaceBlock && + getRemainingDays(licenseData?.payload.trialEnd) < 7 + ) { + setShowTrialExpiryBanner(true); + } + }, [licenseData, isFetching]); + + const handleUpgrade = (): void => { + if (role === 'ADMIN') { + history.push(ROUTES.BILLING); + } + }; + + const isLogsView = (): boolean => + routeKey === 'LOGS' || + routeKey === 'LOGS_EXPLORER' || + routeKey === 'LOGS_PIPELINES' || + routeKey === 'LOGS_SAVE_VIEWS'; + + const isTracesView = (): boolean => + routeKey === 'TRACES_EXPLORER' || routeKey === 'TRACES_SAVE_VIEWS'; + + const isDashboardListView = (): boolean => routeKey === 'ALL_DASHBOARD'; + const isDashboardView = (): boolean => { + /** + * need to match using regex here as the getRoute function will not work for + * routes with id + */ + const regex = /^\/dashboard\/[a-zA-Z0-9_-]+$/; + return regex.test(pathname); + }; + + const isDashboardWidgetView = (): boolean => { + const regex = /^\/dashboard\/[a-zA-Z0-9_-]+\/new$/; + return regex.test(pathname); + }; + + useEffect(() => { + if (isDarkMode) { + document.body.classList.remove('lightMode'); + document.body.classList.add('darkMode'); + } else { + document.body.classList.add('lightMode'); + document.body.classList.remove('darkMode'); + } + }, [isDarkMode]); + + const isSideNavCollapsed = getLocalStorageKey(IS_SIDEBAR_COLLAPSED); + + return ( + + + {pageTitle} + + + {showTrialExpiryBanner && ( +
+ You are in free trial period. Your free trial will end on{' '} + + {getFormattedDate(licenseData?.payload?.trialEnd || Date.now())}. + + {role === 'ADMIN' ? ( + + {' '} + Please{' '} + + upgrade + + to continue using SigNoz features. + + ) : ( + 'Please contact your administrator for upgrading to a paid plan.' + )} +
+ )} + + + {isToDisplayLayout && !renderFullScreen && ( + + )} +
+ }> + + + + {isToDisplayLayout && !renderFullScreen && } + {children} + + + + +
+
+
+ ); +} + +interface AppLayoutProps { + children: ReactNode; +} + +export default AppLayout; diff --git a/signoz/frontend/src/container/AppLayout/styles.ts b/signoz/frontend/src/container/AppLayout/styles.ts new file mode 100644 index 0000000..c66d2ee --- /dev/null +++ b/signoz/frontend/src/container/AppLayout/styles.ts @@ -0,0 +1,26 @@ +import { Layout as LayoutComponent } from 'antd'; +import styled from 'styled-components'; + +export const Layout = styled(LayoutComponent)` + &&& { + display: flex; + position: relative; + min-height: calc(100vh - 8rem); + overflow: hidden; + height: 100%; + flex-direction: column !important; + } +`; + +export const LayoutContent = styled(LayoutComponent.Content)` + height: 100%; + &::-webkit-scrollbar { + width: 0.1rem; + } +`; + +export const ChildrenContainer = styled.div` + display: flex; + flex-direction: column; + height: 100%; +`; diff --git a/signoz/frontend/src/container/AppLayout/utils.ts b/signoz/frontend/src/container/AppLayout/utils.ts new file mode 100644 index 0000000..649dcc4 --- /dev/null +++ b/signoz/frontend/src/container/AppLayout/utils.ts @@ -0,0 +1,9 @@ +import ROUTES from 'constants/routes'; + +export function getRouteKey(pathname: string): string { + const [routeKey] = Object.entries(ROUTES).find( + ([, value]) => value === pathname, + ) || ['DEFAULT']; + + return routeKey; +} diff --git a/signoz/frontend/src/container/BillingContainer/BillingContainer.styles.scss b/signoz/frontend/src/container/BillingContainer/BillingContainer.styles.scss new file mode 100644 index 0000000..e4c7dee --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingContainer.styles.scss @@ -0,0 +1,79 @@ +.billing-container { + margin-bottom: 40px; + padding-top: 36px; + width: 65%; + + .billing-summary { + margin: 24px 8px; + } + + .billing-details { + margin: 24px 0px; + + .ant-table-title { + color: var(--bg-vanilla-400); + background-color: rgb(27, 28, 32); + } + + .ant-table-cell { + background-color: 
var(--bg-ink-400); + border-color: var(--bg-slate-500); + } + + .ant-table-tbody { + td { + border-color: var(--bg-slate-500); + } + } + } + + .upgrade-plan-benefits { + margin: 0px 8px; + border: 1px solid #333; + border-radius: 5px; + padding: 0 48px; + .plan-benefits { + .plan-benefit { + display: flex; + align-items: center; + gap: 16px; + margin: 16px 0; + } + } + } + + .empty-graph-card { + .ant-card-body { + height: 40vh; + display: flex; + justify-content: center; + align-items: center; + } + } +} + +.ant-skeleton.ant-skeleton-element.ant-skeleton-active { + width: 100%; + min-width: 100%; +} + +.ant-skeleton.ant-skeleton-element .ant-skeleton-input { + min-width: 100% !important; +} + +.lightMode { + .billing-container { + .billing-details { + .ant-table-cell { + background: var(--bg-vanilla-100); + border-color: var(--bg-vanilla-200); + } + + .ant-table-tbody { + td { + border-color: var(--bg-vanilla-200); + } + } + } + } +} diff --git a/signoz/frontend/src/container/BillingContainer/BillingContainer.test.tsx b/signoz/frontend/src/container/BillingContainer/BillingContainer.test.tsx new file mode 100644 index 0000000..1988df3 --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingContainer.test.tsx @@ -0,0 +1,184 @@ +import { billingSuccessResponse } from 'mocks-server/__mockdata__/billing'; +import { + notOfTrailResponse, + trialConvertedToSubscriptionResponse, +} from 'mocks-server/__mockdata__/licenses'; +import { server } from 'mocks-server/server'; +import { rest } from 'msw'; +import { act, render, screen } from 'tests/test-utils'; +import { getFormattedDate } from 'utils/timeUtils'; + +import BillingContainer from './BillingContainer'; + +const lisenceUrl = 'http://localhost/api/v2/licenses'; + +jest.mock('uplot', () => { + const paths = { + spline: jest.fn(), + bars: jest.fn(), + }; + + const uplotMock = jest.fn(() => ({ + paths, + })); + + return { + paths, + default: uplotMock, + }; +}); + +window.ResizeObserver = + 
window.ResizeObserver || + jest.fn().mockImplementation(() => ({ + disconnect: jest.fn(), + observe: jest.fn(), + unobserve: jest.fn(), + })); + +describe('BillingContainer', () => { + test('Component should render', async () => { + act(() => { + render(); + }); + + const dataInjection = screen.getByRole('columnheader', { + name: /data ingested/i, + }); + expect(dataInjection).toBeInTheDocument(); + const pricePerUnit = screen.getByRole('columnheader', { + name: /price per unit/i, + }); + expect(pricePerUnit).toBeInTheDocument(); + const cost = screen.getByRole('columnheader', { + name: /cost \(billing period to date\)/i, + }); + expect(cost).toBeInTheDocument(); + + const manageBilling = screen.getByRole('button', { + name: 'manage_billing', + }); + expect(manageBilling).toBeInTheDocument(); + + const dollar = screen.getByText(/\$0/i); + expect(dollar).toBeInTheDocument(); + + const currentBill = screen.getByText('billing'); + expect(currentBill).toBeInTheDocument(); + }); + + test('OnTrail', async () => { + act(() => { + render(); + }); + + const freeTrailText = await screen.findByText('Free Trial'); + expect(freeTrailText).toBeInTheDocument(); + + const currentBill = screen.getByText('billing'); + expect(currentBill).toBeInTheDocument(); + + const dollar0 = await screen.findByText(/\$0/i); + expect(dollar0).toBeInTheDocument(); + const onTrail = await screen.findByText( + /You are in free trial period. 
Your free trial will end on 20 Oct 2023/i, + ); + expect(onTrail).toBeInTheDocument(); + + const numberOfDayRemaining = await screen.findByText(/1 days_remaining/i); + expect(numberOfDayRemaining).toBeInTheDocument(); + const upgradeButton = await screen.findAllByRole('button', { + name: /upgrade_plan/i, + }); + expect(upgradeButton[1]).toBeInTheDocument(); + expect(upgradeButton.length).toBe(2); + const checkPaidPlan = await screen.findByText(/checkout_plans/i); + expect(checkPaidPlan).toBeInTheDocument(); + + const link = screen.getByRole('link', { name: /here/i }); + expect(link).toBeInTheDocument(); + }); + + test('OnTrail but trialConvertedToSubscription', async () => { + server.use( + rest.get(lisenceUrl, (req, res, ctx) => + res(ctx.status(200), ctx.json(trialConvertedToSubscriptionResponse)), + ), + ); + + act(() => { + render(); + }); + + const currentBill = screen.getByText('billing'); + expect(currentBill).toBeInTheDocument(); + + const dollar0 = await screen.findByText(/\$0/i); + expect(dollar0).toBeInTheDocument(); + + const onTrail = await screen.findByText( + /You are in free trial period. 
Your free trial will end on 20 Oct 2023/i, + ); + expect(onTrail).toBeInTheDocument(); + + const receivedCardDetails = await screen.findByText( + /card_details_recieved_and_billing_info/i, + ); + expect(receivedCardDetails).toBeInTheDocument(); + + const manageBillingButton = await screen.findByRole('button', { + name: /manage_billing/i, + }); + expect(manageBillingButton).toBeInTheDocument(); + + const dayRemainingInBillingPeriod = await screen.findByText( + /1 days_remaining/i, + ); + expect(dayRemainingInBillingPeriod).toBeInTheDocument(); + }); + + test('Not on ontrail', async () => { + server.use( + rest.get(lisenceUrl, (req, res, ctx) => + res(ctx.status(200), ctx.json(notOfTrailResponse)), + ), + ); + const { findByText } = render(); + + const billingPeriodText = `Your current billing period is from ${getFormattedDate( + billingSuccessResponse.data.billingPeriodStart, + )} to ${getFormattedDate(billingSuccessResponse.data.billingPeriodEnd)}`; + + const billingPeriod = await findByText(billingPeriodText); + expect(billingPeriod).toBeInTheDocument(); + + const currentBill = screen.getByText('billing'); + expect(currentBill).toBeInTheDocument(); + + const dollar0 = await screen.findByText(/\$1,278.3/i); + expect(dollar0).toBeInTheDocument(); + + const metricsRow = await screen.findByRole('row', { + name: /metrics 4012 Million 0.1 \$ 401.2/i, + }); + expect(metricsRow).toBeInTheDocument(); + + const logRow = await screen.findByRole('row', { + name: /Logs 497 GB 0.4 \$ 198.8/i, + }); + expect(logRow).toBeInTheDocument(); + }); + + test('Should render corrent day remaining in billing period', async () => { + server.use( + rest.get(lisenceUrl, (req, res, ctx) => + res(ctx.status(200), ctx.json(notOfTrailResponse)), + ), + ); + render(); + const dayRemainingInBillingPeriod = await screen.findByText( + /11 days_remaining/i, + ); + expect(dayRemainingInBillingPeriod).toBeInTheDocument(); + }); +}); diff --git 
a/signoz/frontend/src/container/BillingContainer/BillingContainer.tsx b/signoz/frontend/src/container/BillingContainer/BillingContainer.tsx new file mode 100644 index 0000000..e366f06 --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -0,0 +1,559 @@ +/* eslint-disable @typescript-eslint/no-loop-func */ +import './BillingContainer.styles.scss'; + +import { CheckCircleOutlined, CloudDownloadOutlined } from '@ant-design/icons'; +import { Color } from '@signozhq/design-tokens'; +import { + Alert, + Button, + Card, + Col, + Flex, + Row, + Skeleton, + Table, + Tag, + Typography, +} from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import updateCreditCardApi from 'api/billing/checkout'; +import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage'; +import manageCreditCardApi from 'api/billing/manage'; +import logEvent from 'api/common/logEvent'; +import Spinner from 'components/Spinner'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import useAxiosError from 'hooks/useAxiosError'; +import useLicense from 'hooks/useLicense'; +import { useNotifications } from 'hooks/useNotifications'; +import { isEmpty, pick } from 'lodash-es'; +import { useCallback, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { useMutation, useQuery } from 'react-query'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; +import { License } from 'types/api/licenses/def'; +import AppReducer from 'types/reducer/app'; +import { isCloudUser } from 'utils/app'; +import { getFormattedDate, getRemainingDays } from 'utils/timeUtils'; + +import { BillingUsageGraph } from './BillingUsageGraph/BillingUsageGraph'; +import { prepareCsvData } from 
'./BillingUsageGraph/utils'; + +interface DataType { + key: string; + name: string; + unit: string; + dataIngested: string; + pricePerUnit: string; + cost: string; +} + +enum SubscriptionStatus { + PastDue = 'past_due', + Active = 'active', +} + +const renderSkeletonInput = (): JSX.Element => ( + +); + +const dummyData: DataType[] = [ + { + key: '1', + name: 'Logs', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, + { + key: '2', + name: 'Traces', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, + { + key: '3', + name: 'Metrics', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, +]; + +const dummyColumns: ColumnsType = [ + { + title: '', + dataIndex: 'name', + key: 'name', + render: renderSkeletonInput, + }, + { + title: 'Unit', + dataIndex: 'unit', + key: 'unit', + render: renderSkeletonInput, + }, + { + title: 'Data Ingested', + dataIndex: 'dataIngested', + key: 'dataIngested', + render: renderSkeletonInput, + }, + { + title: 'Price per Unit', + dataIndex: 'pricePerUnit', + key: 'pricePerUnit', + render: renderSkeletonInput, + }, + { + title: 'Cost (Billing period to date)', + dataIndex: 'cost', + key: 'cost', + render: renderSkeletonInput, + }, +]; + +// eslint-disable-next-line sonarjs/cognitive-complexity +export default function BillingContainer(): JSX.Element { + const { t } = useTranslation(['billings']); + const daysRemainingStr = t('days_remaining'); + const [headerText, setHeaderText] = useState(''); + const [billAmount, setBillAmount] = useState(0); + const [activeLicense, setActiveLicense] = useState(null); + const [daysRemaining, setDaysRemaining] = useState(0); + const [isFreeTrial, setIsFreeTrial] = useState(false); + const [data, setData] = useState([]); + const [apiResponse, setApiResponse] = useState< + Partial + >({}); + + const { isFetching, data: licensesData, error: licenseError } = useLicense(); + + const { user, org } = useSelector((state) => state.app); + const { notifications } = 
useNotifications(); + + const handleError = useAxiosError(); + + const isCloudUserVal = isCloudUser(); + + const processUsageData = useCallback( + (data: any): void => { + if (isEmpty(data?.payload)) { + return; + } + const { + details: { breakdown = [], billTotal }, + billingPeriodStart, + billingPeriodEnd, + } = data?.payload || {}; + const formattedUsageData: any[] = []; + + if (breakdown && Array.isArray(breakdown)) { + for (let index = 0; index < breakdown.length; index += 1) { + const element = breakdown[index]; + + element?.tiers.forEach( + ( + tier: { quantity: number; unitPrice: number; tierCost: number }, + i: number, + ) => { + formattedUsageData.push({ + key: `${index}${i}`, + name: i === 0 ? element?.type : '', + dataIngested: `${tier.quantity} ${element?.unit}`, + pricePerUnit: tier.unitPrice, + cost: `$ ${tier.tierCost}`, + }); + }, + ); + } + } + + setData(formattedUsageData); + + if (!licensesData?.payload?.onTrial) { + const remainingDays = getRemainingDays(billingPeriodEnd) - 1; + + setHeaderText( + `Your current billing period is from ${getFormattedDate( + billingPeriodStart, + )} to ${getFormattedDate(billingPeriodEnd)}`, + ); + setDaysRemaining(remainingDays > 0 ? 
remainingDays : 0); + setBillAmount(billTotal); + } + + setApiResponse(data?.payload || {}); + }, + [licensesData?.payload?.onTrial], + ); + + const isSubscriptionPastDue = + apiResponse.subscriptionStatus === SubscriptionStatus.PastDue; + + const { isLoading, isFetching: isFetchingBillingData } = useQuery( + [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], + { + queryFn: () => getUsage(activeLicense?.key || ''), + onError: handleError, + enabled: activeLicense !== null, + onSuccess: processUsageData, + }, + ); + + useEffect(() => { + const activeValidLicense = + licensesData?.payload?.licenses?.find( + (license) => license.isCurrent === true, + ) || null; + + setActiveLicense(activeValidLicense); + + if (!isFetching && licensesData?.payload?.onTrial && !licenseError) { + const remainingDays = getRemainingDays(licensesData?.payload?.trialEnd); + + setIsFreeTrial(true); + setBillAmount(0); + setDaysRemaining(remainingDays > 0 ? remainingDays : 0); + setHeaderText( + `You are in free trial period. Your free trial will end on ${getFormattedDate( + licensesData?.payload?.trialEnd, + )}`, + ); + } + }, [isFetching, licensesData?.payload, licenseError]); + + const columns: ColumnsType = [ + { + title: '', + dataIndex: 'name', + key: 'name', + render: (text): JSX.Element =>
{text}
, + }, + { + title: 'Data Ingested', + dataIndex: 'dataIngested', + key: 'dataIngested', + }, + { + title: 'Price per Unit', + dataIndex: 'pricePerUnit', + key: 'pricePerUnit', + }, + { + title: 'Cost (Billing period to date)', + dataIndex: 'cost', + key: 'cost', + }, + ]; + + const renderTableSkeleton = (): JSX.Element => ( +
( + + )), + }} + /> + ); + + const handleBillingOnSuccess = ( + data: ErrorResponse | SuccessResponse, + ): void => { + if (data?.payload?.redirectURL) { + const newTab = document.createElement('a'); + newTab.href = data.payload.redirectURL; + newTab.target = '_blank'; + newTab.rel = 'noopener noreferrer'; + newTab.click(); + } + }; + + const handleBillingOnError = (): void => { + notifications.error({ + message: SOMETHING_WENT_WRONG, + }); + }; + + const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation( + updateCreditCardApi, + { + onSuccess: (data) => { + handleBillingOnSuccess(data); + }, + onError: handleBillingOnError, + }, + ); + + const { + mutate: manageCreditCard, + isLoading: isLoadingManageBilling, + } = useMutation(manageCreditCardApi, { + onSuccess: (data) => { + handleBillingOnSuccess(data); + }, + onError: handleBillingOnError, + }); + + const handleBilling = useCallback(async () => { + if (isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription) { + logEvent('Billing : Upgrade Plan', { + user: pick(user, ['email', 'userId', 'name']), + org, + }); + + updateCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.href, + cancelURL: window.location.href, + }); + } else { + logEvent('Billing : Manage Billing', { + user: pick(user, ['email', 'userId', 'name']), + org, + }); + + manageCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.href, + cancelURL: window.location.href, + }); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ + activeLicense?.key, + isFreeTrial, + licensesData?.payload?.trialConvertedToSubscription, + manageCreditCard, + updateCreditCard, + ]); + + const BillingUsageGraphCallback = useCallback( + () => + !isLoading && !isFetchingBillingData ? 
( + + ) : ( + + + + ), + [apiResponse, billAmount, isLoading, isFetchingBillingData], + ); + + const { Text } = Typography; + const subscriptionPastDueMessage = (): JSX.Element => ( + + {`We were not able to process payments for your account. Please update your card details `} + + {t('here')} + + {` if your payment information has changed. Email us at `} + cloud-support@signoz.io + {` otherwise. Be sure to provide this information immediately to avoid interruption to your service.`} + + ); + + const handleCsvDownload = useCallback((): void => { + try { + const csv = prepareCsvData(apiResponse); + + if (!csv.csvData || !csv.fileName) { + throw new Error('Invalid CSV data or file name.'); + } + + const csvBlob = new Blob([csv.csvData], { type: 'text/csv;charset=utf-8;' }); + const csvUrl = URL.createObjectURL(csvBlob); + const downloadLink = document.createElement('a'); + + downloadLink.href = csvUrl; + downloadLink.download = csv.fileName; + document.body.appendChild(downloadLink); // Required for Firefox + downloadLink.click(); + + // Clean up + downloadLink.remove(); + URL.revokeObjectURL(csvUrl); // Release the memory associated with the object URL + notifications.success({ + message: 'Download successful', + }); + } catch (error) { + console.error('Error downloading the CSV file:', error); + notifications.error({ + message: SOMETHING_WENT_WRONG, + }); + } + }, [apiResponse, notifications]); + + return ( +
+ + + {t('billing')} + + + {t('manage_billing_and_costs')} + + + + + + + + {isCloudUserVal ? t('enterprise_cloud') : t('enterprise')}{' '} + {isFreeTrial ? Free Trial : ''} + + {!isLoading && !isFetchingBillingData ? ( + + {daysRemaining} {daysRemainingStr} + + ) : null} + + + + + + + + {licensesData?.payload?.onTrial && + licensesData?.payload?.trialConvertedToSubscription && ( + + {t('card_details_recieved_and_billing_info')} + + )} + + {!isLoading && !isFetchingBillingData ? ( + headerText && ( + + ) + ) : ( + + )} + + {isSubscriptionPastDue && + (!isLoading && !isFetchingBillingData ? ( + + ) : ( + + ))} + + + + +
+ {!isLoading && !isFetchingBillingData && ( +
+ )} + + {(isLoading || isFetchingBillingData) && renderTableSkeleton()} + + + {isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription && ( +
+ +
+ + + {t('upgrade_now_text')} + + + + {t('Your billing will start only after the trial period')} + + + + + {t('checkout_plans')}   + + {t('here')} + + + + + + + + + + )} + + ); +} diff --git a/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss new file mode 100644 index 0000000..e5722d4 --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.styles.scss @@ -0,0 +1,29 @@ +.billing-graph-card { + .ant-card-body { + height: 40vh; + .uplot-graph-container { + padding: 8px; + } + } + .total-spent { + font-family: 'SF Mono' monospace; + font-size: 16px; + font-style: normal; + font-weight: 600; + line-height: 24px; + } + + .total-spent-title { + font-size: 12px; + font-weight: 500; + line-height: 22px; + letter-spacing: 0.48px; + color: rgba(255, 255, 255, 0.5); + } +} + +.lightMode { + .total-spent-title { + color: var(--bg-ink-100); + } +} diff --git a/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx new file mode 100644 index 0000000..3afee8d --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/BillingUsageGraph.tsx @@ -0,0 +1,205 @@ +import './BillingUsageGraph.styles.scss'; +import '../../../lib/uPlotLib/uPlotLib.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Card, Flex, Typography } from 'antd'; +import Uplot from 'components/Uplot'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useResizeObserver } from 'hooks/useDimensions'; +import tooltipPlugin from 'lib/uPlotLib/plugins/tooltipPlugin'; +import getAxes from 'lib/uPlotLib/utils/getAxes'; +import getRenderer from 'lib/uPlotLib/utils/getRenderer'; +import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; +import { 
getXAxisScale } from 'lib/uPlotLib/utils/getXAxisScale'; +import { getYAxisScale } from 'lib/uPlotLib/utils/getYAxisScale'; +import { useMemo, useRef } from 'react'; +import uPlot from 'uplot'; + +import { + convertDataToMetricRangePayload, + fillMissingValuesForQuantities, +} from './utils'; + +interface BillingUsageGraphProps { + data: any; + billAmount: number; +} +const paths = ( + u: any, + seriesIdx: number, + idx0: number, + idx1: number, + extendGap: boolean, + buildClip: boolean, +): uPlot.Series.PathBuilder => { + const s = u.series[seriesIdx]; + const style = s.drawStyle; + const interp = s.lineInterpolation; + + const renderer = getRenderer(style, interp); + + return renderer(u, seriesIdx, idx0, idx1, extendGap, buildClip); +}; + +const calculateStartEndTime = ( + data: any, +): { startTime: number; endTime: number } => { + const timestamps: number[] = []; + data?.details?.breakdown?.forEach((breakdown: any) => { + breakdown?.dayWiseBreakdown?.breakdown?.forEach((entry: any) => { + timestamps.push(entry?.timestamp); + }); + }); + const billingTime = [data?.billingPeriodStart, data?.billingPeriodEnd]; + const startTime: number = Math.min(...timestamps, ...billingTime); + const endTime: number = Math.max(...timestamps, ...billingTime); + return { startTime, endTime }; +}; + +export function BillingUsageGraph(props: BillingUsageGraphProps): JSX.Element { + const { data, billAmount } = props; + const graphCompatibleData = useMemo( + () => convertDataToMetricRangePayload(data), + [data], + ); + const chartData = getUPlotChartData(graphCompatibleData); + const graphRef = useRef(null); + const isDarkMode = useIsDarkMode(); + const containerDimensions = useResizeObserver(graphRef); + + const { startTime, endTime } = useMemo(() => calculateStartEndTime(data), [ + data, + ]); + + const getGraphSeries = (color: string, label: string): any => ({ + drawStyle: 'bars', + paths, + lineInterpolation: 'spline', + show: true, + label, + fill: color, + stroke: color, + 
width: 2, + spanGaps: true, + points: { + size: 5, + show: false, + stroke: color, + }, + }); + + const uPlotSeries: any = useMemo( + () => [ + { label: 'Timestamp', stroke: 'purple' }, + getGraphSeries( + '#7CEDBE', + graphCompatibleData.data.result[0]?.legend as string, + ), + getGraphSeries( + '#4E74F8', + graphCompatibleData.data.result[1]?.legend as string, + ), + getGraphSeries( + '#F24769', + graphCompatibleData.data.result[2]?.legend as string, + ), + ], + [graphCompatibleData.data.result], + ); + + const axesOptions = getAxes(isDarkMode, ''); + + const optionsForChart: uPlot.Options = useMemo( + () => ({ + id: 'billing-usage-breakdown', + series: uPlotSeries, + width: containerDimensions.width, + height: containerDimensions.height - 30, + axes: [ + { + ...axesOptions[0], + grid: { + ...axesOptions.grid, + show: false, + stroke: isDarkMode ? Color.BG_VANILLA_400 : Color.BG_INK_400, + }, + }, + { + ...axesOptions[1], + stroke: isDarkMode ? Color.BG_SLATE_200 : Color.BG_INK_400, + }, + ], + scales: { + x: { + ...getXAxisScale(startTime - 86400, endTime), // Minus 86400 from startTime to decrease a day to have a buffer start + }, + y: { + ...getYAxisScale({ + series: graphCompatibleData?.data?.newResult?.data?.result, + yAxisUnit: '', + softMax: null, + softMin: null, + }), + }, + }, + legend: { + show: true, + live: false, + isolate: true, + }, + cursor: { + lock: false, + focus: { + prox: 1e6, + bias: 1, + }, + }, + focus: { + alpha: 0.3, + }, + padding: [32, 32, 16, 16], + plugins: [ + tooltipPlugin({ + apiResponse: fillMissingValuesForQuantities( + graphCompatibleData, + chartData[0], + ), + yAxisUnit: '', + isBillingUsageGraphs: true, + isDarkMode, + }), + ], + }), + [ + axesOptions, + chartData, + containerDimensions.height, + containerDimensions.width, + endTime, + graphCompatibleData, + isDarkMode, + startTime, + uPlotSeries, + ], + ); + + const numberFormatter = new Intl.NumberFormat('en-US'); + + return ( + + + + + TOTAL SPENT + + + 
${numberFormatter.format(billAmount)} + + + +
+ +
+
+ ); +} diff --git a/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/generateCsvData.ts b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/generateCsvData.ts new file mode 100644 index 0000000..b70526a --- /dev/null +++ b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/generateCsvData.ts @@ -0,0 +1,129 @@ +import dayjs from 'dayjs'; + +export interface QuantityData { + metric: string; + values: [number, number][]; + queryName: string; + legend: string; + quantity: number[]; + unit: string; +} + +interface DataPoint { + date: string; + metric: { + total: number; + cost: number; + }; + trace: { + total: number; + cost: number; + }; + log: { + total: number; + cost: number; + }; +} + +interface CsvData { + Date: string; + 'Metrics Vol (Mn samples)': number; + 'Metrics Cost ($)': number; + 'Traces Vol (GBs)': number; + 'Traces Cost ($)': number; + 'Logs Vol (GBs)': number; + 'Logs Cost ($)': number; +} + +const formatDate = (timestamp: number): string => + dayjs.unix(timestamp).format('MM/DD/YYYY'); + +const getQuantityData = ( + data: QuantityData[], + metricName: string, +): QuantityData => { + const defaultData: QuantityData = { + metric: metricName, + values: [], + queryName: metricName, + legend: metricName, + quantity: [], + unit: '', + }; + return data.find((d) => d.metric === metricName) || defaultData; +}; + +const generateCsvData = (quantityData: QuantityData[]): any[] => { + const convertData = (data: QuantityData[]): DataPoint[] => { + const metricsData = getQuantityData(data, 'Metrics'); + const tracesData = getQuantityData(data, 'Traces'); + const logsData = getQuantityData(data, 'Logs'); + + const timestamps = metricsData.values.map((value) => value[0]); + + return timestamps.map((timestamp, index) => { + const date = formatDate(timestamp); + + return { + date, + metric: { + total: metricsData.quantity[index] ?? 0, + cost: metricsData.values[index]?.[1] ?? 
0, + }, + trace: { + total: tracesData.quantity[index] ?? 0, + cost: tracesData.values[index]?.[1] ?? 0, + }, + log: { + total: logsData.quantity[index] ?? 0, + cost: logsData.values[index]?.[1] ?? 0, + }, + }; + }); + }; + + const formattedData = convertData(quantityData); + + // Calculate totals + const totals = formattedData.reduce( + (acc, dataPoint) => { + acc.metric.total += dataPoint.metric.total; + acc.metric.cost += dataPoint.metric.cost; + acc.trace.total += dataPoint.trace.total; + acc.trace.cost += dataPoint.trace.cost; + acc.log.total += dataPoint.log.total; + acc.log.cost += dataPoint.log.cost; + return acc; + }, + { + metric: { total: 0, cost: 0 }, + trace: { total: 0, cost: 0 }, + log: { total: 0, cost: 0 }, + }, + ); + + const csvData: CsvData[] = formattedData.map((dataPoint) => ({ + Date: dataPoint.date, + 'Metrics Vol (Mn samples)': parseFloat(dataPoint.metric.total.toFixed(2)), + 'Metrics Cost ($)': parseFloat(dataPoint.metric.cost.toFixed(2)), + 'Traces Vol (GBs)': parseFloat(dataPoint.trace.total.toFixed(2)), + 'Traces Cost ($)': parseFloat(dataPoint.trace.cost.toFixed(2)), + 'Logs Vol (GBs)': parseFloat(dataPoint.log.total.toFixed(2)), + 'Logs Cost ($)': parseFloat(dataPoint.log.cost.toFixed(2)), + })); + + // Add totals row + csvData.push({ + Date: 'Total', + 'Metrics Vol (Mn samples)': parseFloat(totals.metric.total.toFixed(2)), + 'Metrics Cost ($)': parseFloat(totals.metric.cost.toFixed(2)), + 'Traces Vol (GBs)': parseFloat(totals.trace.total.toFixed(2)), + 'Traces Cost ($)': parseFloat(totals.trace.cost.toFixed(2)), + 'Logs Vol (GBs)': parseFloat(totals.log.total.toFixed(2)), + 'Logs Cost ($)': parseFloat(totals.log.cost.toFixed(2)), + }); + + return csvData; +}; + +export default generateCsvData; diff --git a/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts new file mode 100644 index 0000000..5123d59 --- /dev/null +++ 
b/signoz/frontend/src/container/BillingContainer/BillingUsageGraph/utils.ts @@ -0,0 +1,131 @@ +import { UsageResponsePayloadProps } from 'api/billing/getUsage'; +import dayjs from 'dayjs'; +import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData'; +import { isEmpty, isNull } from 'lodash-es'; +import { unparse } from 'papaparse'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; + +import generateCsvData, { QuantityData } from './generateCsvData'; + +export const convertDataToMetricRangePayload = ( + data: any, +): MetricRangePayloadProps => { + const emptyStateData = { + data: { + newResult: { data: { result: [], resultType: '' } }, + result: [], + resultType: '', + }, + }; + if (isEmpty(data)) { + return emptyStateData; + } + const { + details: { breakdown = [] }, + } = data || {}; + + if (isNull(breakdown) || breakdown.length === 0) { + return emptyStateData; + } + + const payload = breakdown.map((info: any) => { + const metric = info.type; + const sortedBreakdownData = (info?.dayWiseBreakdown?.breakdown || []).sort( + (a: any, b: any) => a.timestamp - b.timestamp, + ); + const values = (sortedBreakdownData || []).map((categoryInfo: any) => [ + categoryInfo.timestamp, + categoryInfo.total, + ]); + const queryName = info.type; + const legend = info.type; + const { unit } = info; + const quantity = sortedBreakdownData.map( + (categoryInfo: any) => categoryInfo.quantity, + ); + return { metric, values, queryName, legend, quantity, unit }; + }); + + const sortedData = payload.sort((a: any, b: any) => { + const sumA = a.values.reduce((acc: any, val: any) => acc + val[1], 0); + const avgA = a.values.length ? sumA / a.values.length : 0; + const sumB = b.values.reduce((acc: any, val: any) => acc + val[1], 0); + const avgB = b.values.length ? sumB / b.values.length : 0; + + return sumA === sumB ? 
avgB - avgA : sumB - sumA; + }); + + return { + data: { + newResult: { data: { result: sortedData, resultType: '' } }, + result: sortedData, + resultType: '', + }, + }; +}; + +export function quantityDataArr(data: any, timestampArray: number[]): any[] { + const { result } = data.data; + + const transformedResultArr: any[] = []; + result.forEach((item: any) => { + const timestampToQuantityMap: { [timestamp: number]: number } = {}; + item.values.forEach((val: number[], index: number) => { + timestampToQuantityMap[val[0]] = item.quantity[index]; + }); + + const quantityArray = timestampArray.map( + (timestamp: number) => timestampToQuantityMap[timestamp] ?? null, + ); + transformedResultArr.push({ ...item, quantity: quantityArray }); + }); + return transformedResultArr; +} + +export function fillMissingValuesForQuantities( + data: any, + timestampArray: number[], +): MetricRangePayloadProps { + const transformedResultArr = quantityDataArr(data, timestampArray); + + return { + data: { + newResult: { data: { result: transformedResultArr, resultType: '' } }, + result: transformedResultArr, + resultType: '', + }, + }; +} + +const formatDate = (timestamp: number): string => + dayjs.unix(timestamp).format('MM/DD/YYYY'); + +export function csvFileName(csvData: QuantityData[]): string { + if (!csvData.length) { + return `billing-usage.csv`; + } + + const { values } = csvData[0]; + + const timestamps = values.map((item) => item[0]); + const startDate = formatDate(Math.min(...timestamps)); + const endDate = formatDate(Math.max(...timestamps)); + + return `billing_usage_(${startDate}-${endDate}).csv`; +} + +export function prepareCsvData( + data: Partial, +): { + csvData: string; + fileName: string; +} { + const graphCompatibleData = convertDataToMetricRangePayload(data); + const chartData = getUPlotChartData(graphCompatibleData); + const quantityMapArr = quantityDataArr(graphCompatibleData, chartData[0]); + + return { + csvData: unparse(generateCsvData(quantityMapArr)), + 
fileName: csvFileName(quantityMapArr), + }; +} diff --git a/signoz/frontend/src/container/ConfigDropdown/Config/ErrorLink.tsx b/signoz/frontend/src/container/ConfigDropdown/Config/ErrorLink.tsx new file mode 100644 index 0000000..fa2f471 --- /dev/null +++ b/signoz/frontend/src/container/ConfigDropdown/Config/ErrorLink.tsx @@ -0,0 +1,33 @@ +import { PureComponent } from 'react'; + +interface State { + hasError: boolean; +} + +interface Props { + children: JSX.Element; +} + +class ErrorLink extends PureComponent { + constructor(props: Props) { + super(props); + this.state = { hasError: false }; + } + + static getDerivedStateFromError(): State { + return { hasError: true }; + } + + render(): JSX.Element { + const { children } = this.props; + const { hasError } = this.state; + + if (hasError) { + return
; + } + + return children; + } +} + +export default ErrorLink; diff --git a/signoz/frontend/src/container/ConfigDropdown/Config/Link.tsx b/signoz/frontend/src/container/ConfigDropdown/Config/Link.tsx new file mode 100644 index 0000000..b31e374 --- /dev/null +++ b/signoz/frontend/src/container/ConfigDropdown/Config/Link.tsx @@ -0,0 +1,23 @@ +import { ReactNode } from 'react'; +import { Link } from 'react-router-dom'; + +function LinkContainer({ children, href }: LinkContainerProps): JSX.Element { + const isInternalLink = href.startsWith('/'); + + if (isInternalLink) { + return {children}; + } + + return ( + + {children} + + ); +} + +interface LinkContainerProps { + children: ReactNode; + href: string; +} + +export default LinkContainer; diff --git a/signoz/frontend/src/container/ConfigDropdown/Config/index.tsx b/signoz/frontend/src/container/ConfigDropdown/Config/index.tsx new file mode 100644 index 0000000..94ad322 --- /dev/null +++ b/signoz/frontend/src/container/ConfigDropdown/Config/index.tsx @@ -0,0 +1,49 @@ +import { Menu, Space } from 'antd'; +import Spinner from 'components/Spinner'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { lazy, Suspense, useMemo } from 'react'; +import { ConfigProps } from 'types/api/dynamicConfigs/getDynamicConfigs'; +import { lazyRetry } from 'utils/lazyWithRetries'; + +import ErrorLink from './ErrorLink'; +import LinkContainer from './Link'; + +function HelpToolTip({ config }: HelpToolTipProps): JSX.Element { + const sortedConfig = useMemo( + () => config.components.sort((a, b) => a.position - b.position), + [config.components], + ); + + const isDarkMode = useIsDarkMode(); + + const items = sortedConfig.map((item) => { + const iconName = `${isDarkMode ? 
item.darkIcon : item.lightIcon}`; + + const Component = lazy(() => + lazyRetry(() => import(`@ant-design/icons/es/icons/${iconName}.js`)), + ); + return { + key: item.text + item.href, + label: ( + + }> + + + + {item.text} + + + + + ), + }; + }); + + return ; +} + +interface HelpToolTipProps { + config: ConfigProps; +} + +export default HelpToolTip; diff --git a/signoz/frontend/src/container/ConfigDropdown/index.tsx b/signoz/frontend/src/container/ConfigDropdown/index.tsx new file mode 100644 index 0000000..519a05c --- /dev/null +++ b/signoz/frontend/src/container/ConfigDropdown/index.tsx @@ -0,0 +1,75 @@ +import { + CaretDownFilled, + CaretUpFilled, + QuestionCircleFilled, + QuestionCircleOutlined, +} from '@ant-design/icons'; +import { Space } from 'antd'; +import { useIsDarkMode } from 'hooks/useDarkMode'; +import { useMemo, useState } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { ConfigProps } from 'types/api/dynamicConfigs/getDynamicConfigs'; +import AppReducer from 'types/reducer/app'; + +import HelpToolTip from './Config'; +import { ConfigDropdown } from './styles'; + +function DynamicConfigDropdown({ + frontendId, +}: DynamicConfigDropdownProps): JSX.Element { + const { configs } = useSelector((state) => state.app); + const isDarkMode = useIsDarkMode(); + const [isHelpDropDownOpen, setIsHelpDropDownOpen] = useState(false); + + const config = useMemo( + () => + Object.values(configs).find( + (config) => config.frontendPositionId === frontendId, + ), + [frontendId, configs], + ); + + const onToggleHandler = (): void => { + setIsHelpDropDownOpen(!isHelpDropDownOpen); + }; + + const menu = useMemo( + () => ({ + items: [ + { + key: '1', + label: , + }, + ], + }), + [config], + ); + + if (!config) { + return
; + } + + const Icon = isDarkMode ? QuestionCircleOutlined : QuestionCircleFilled; + const DropDownIcon = isHelpDropDownOpen ? CaretUpFilled : CaretDownFilled; + + return ( + + + + + + + ); +} + +interface DynamicConfigDropdownProps { + frontendId: string; +} + +export default DynamicConfigDropdown; diff --git a/signoz/frontend/src/container/ConfigDropdown/styles.ts b/signoz/frontend/src/container/ConfigDropdown/styles.ts new file mode 100644 index 0000000..25144db --- /dev/null +++ b/signoz/frontend/src/container/ConfigDropdown/styles.ts @@ -0,0 +1,6 @@ +import { Dropdown } from 'antd'; +import styled from 'styled-components'; + +export const ConfigDropdown = styled(Dropdown)` + cursor: pointer; +`; diff --git a/signoz/frontend/src/container/Controls/config.ts b/signoz/frontend/src/container/Controls/config.ts new file mode 100644 index 0000000..51d8520 --- /dev/null +++ b/signoz/frontend/src/container/Controls/config.ts @@ -0,0 +1,9 @@ +import { CSSProperties } from 'react'; + +export const ITEMS_PER_PAGE_OPTIONS = [25, 50, 100, 200]; + +export const DEFAULT_PER_PAGE_VALUE = 100; + +export const defaultSelectStyle: CSSProperties = { + minWidth: '6rem', +}; diff --git a/signoz/frontend/src/container/Controls/index.tsx b/signoz/frontend/src/container/Controls/index.tsx new file mode 100644 index 0000000..c738939 --- /dev/null +++ b/signoz/frontend/src/container/Controls/index.tsx @@ -0,0 +1,91 @@ +import { LeftOutlined, RightOutlined } from '@ant-design/icons'; +import { Button, Select } from 'antd'; +import { DEFAULT_PER_PAGE_OPTIONS, Pagination } from 'hooks/queryPagination'; +import { memo, useMemo } from 'react'; +import { popupContainer } from 'utils/selectPopupContainer'; + +import { defaultSelectStyle } from './config'; +import { Container } from './styles'; + +function Controls({ + offset = 0, + perPageOptions = DEFAULT_PER_PAGE_OPTIONS, + isLoading, + totalCount, + countPerPage, + handleNavigatePrevious, + handleNavigateNext, + 
handleCountItemsPerPageChange, + isLogPanel = false, +}: ControlsProps): JSX.Element | null { + const isNextAndPreviousDisabled = useMemo( + () => isLoading || countPerPage < 0 || totalCount === 0, + [isLoading, countPerPage, totalCount], + ); + const isPreviousDisabled = useMemo( + () => (isLogPanel ? false : offset <= 0 || isNextAndPreviousDisabled), + [isLogPanel, isNextAndPreviousDisabled, offset], + ); + const isNextDisabled = useMemo( + () => + isLogPanel ? false : totalCount < countPerPage || isNextAndPreviousDisabled, + [countPerPage, isLogPanel, isNextAndPreviousDisabled, totalCount], + ); + + return ( + + + + + style={defaultSelectStyle} + loading={isLoading} + value={countPerPage} + onChange={handleCountItemsPerPageChange} + getPopupContainer={popupContainer} + > + {perPageOptions.map((count) => ( + {`${count} / page`} + ))} + + + ); +} + +Controls.defaultProps = { + offset: 0, + perPageOptions: DEFAULT_PER_PAGE_OPTIONS, + isLogPanel: false, +}; + +export interface ControlsProps { + offset?: Pagination['offset']; + perPageOptions?: number[]; + totalCount: number; + countPerPage: Pagination['limit']; + isLoading: boolean; + handleNavigatePrevious: () => void; + handleNavigateNext: () => void; + handleCountItemsPerPageChange: (value: Pagination['limit']) => void; + isLogPanel?: boolean; +} + +export default memo(Controls); diff --git a/signoz/frontend/src/container/Controls/styles.ts b/signoz/frontend/src/container/Controls/styles.ts new file mode 100644 index 0000000..0407b8f --- /dev/null +++ b/signoz/frontend/src/container/Controls/styles.ts @@ -0,0 +1,7 @@ +import styled from 'styled-components'; + +export const Container = styled.div` + display: flex; + align-items: center; + gap: 0.5rem; +`; diff --git a/signoz/frontend/src/container/CreateAlertChannels/config.ts b/signoz/frontend/src/container/CreateAlertChannels/config.ts new file mode 100644 index 0000000..3ee3882 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertChannels/config.ts @@ 
-0,0 +1,127 @@ +export interface Channel { + send_resolved?: boolean; + name: string; + filter?: Partial>; +} + +export interface SlackChannel extends Channel { + api_url?: string; + channel?: string; + title?: string; + text?: string; +} + +export interface WebhookChannel extends Channel { + api_url?: string; + // basic auth + username?: string; + password?: string; +} + +// PagerChannel configures alert manager to send +// events to pagerduty +export interface PagerChannel extends Channel { + // ref: https://prometheus.io/docs/alerting/latest/configuration/#pagerduty_config + routing_key?: string; + // displays source of the event in pager duty + client?: string; + client_url?: string; + // A description of the incident + description?: string; + // Severity of the incident + severity?: string; + // The part or component of the affected system that is broken + component?: string; + // A cluster or grouping of sources + group?: string; + // The class/type of the event. + class?: string; + + details?: string; + detailsArray?: Record; +} + +// OpsgenieChannel configures alert manager to send +// events to opsgenie +export interface OpsgenieChannel extends Channel { + // ref: https://prometheus.io/docs/alerting/latest/configuration/#opsgenie_config + api_key: string; + + message?: string; + + // A description of the incident + description?: string; + + // A backlink to the sender of the notification. + source?: string; + + // A set of arbitrary key/value pairs that provide further detail + // about the alert. + details?: string; + detailsArray?: Record; + + // Priority level of alert. Possible values are P1, P2, P3, P4, and P5. + priority?: string; +} + +export interface EmailChannel extends Channel { + // comma separated list of email addresses to send alerts to + to: string; + // HTML body of the email notification. + html: string; + // Further headers email header key/value pairs. + // [ headers: { : , ... 
} ] + headers: Record; +} + +export const ValidatePagerChannel = (p: PagerChannel): string => { + if (!p) { + return 'Received unexpected input for this channel, please contact your administrator '; + } + + if (!p.name || p.name === '') { + return 'Name is mandatory for creating a channel'; + } + + if (!p.routing_key || p.routing_key === '') { + return 'Routing Key is mandatory for creating pagerduty channel'; + } + + // validate details json + try { + JSON.parse(p.details || '{}'); + } catch (e) { + return 'failed to parse additional information, please enter a valid json'; + } + + return ''; +}; + +export enum ChannelType { + Slack = 'slack', + Email = 'email', + Webhook = 'webhook', + Pagerduty = 'pagerduty', + Opsgenie = 'opsgenie', + MsTeams = 'msteams', +} + +// LabelFilterStatement will be used for preparing filter conditions / matchers +export interface LabelFilterStatement { + // ref: https://prometheus.io/docs/alerting/latest/configuration/#matcher + + // label name + name: string; + + // comparators supported by promql are =, !=, =~, or !~. 
= + comparator: string; + + // filter value + value: string; +} + +export interface MsTeamsChannel extends Channel { + webhook_url?: string; + title?: string; + text?: string; +} diff --git a/signoz/frontend/src/container/CreateAlertChannels/defaults.ts b/signoz/frontend/src/container/CreateAlertChannels/defaults.ts new file mode 100644 index 0000000..f687164 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertChannels/defaults.ts @@ -0,0 +1,448 @@ +import { EmailChannel, OpsgenieChannel, PagerChannel } from './config'; + +export const PagerInitialConfig: Partial = { + description: `[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} + {{- if gt (len .CommonLabels) (len .GroupLabels) -}} + {{" "}}( + {{- with .CommonLabels.Remove .GroupLabels.Names }} + {{- range $index, $label := .SortedPairs -}} + {{ if $index }}, {{ end }} + {{- $label.Name }}="{{ $label.Value -}}" + {{- end }} + {{- end -}} + ) + {{- end }}`, + severity: '{{ (index .Alerts 0).Labels.severity }}', + client: 'SigNoz Alert Manager', + client_url: 'https://enter-signoz-host-n-port-here/alerts', + details: JSON.stringify({ + firing: `{{ template "pagerduty.default.instances" .Alerts.Firing }}`, + resolved: `{{ template "pagerduty.default.instances" .Alerts.Resolved }}`, + num_firing: '{{ .Alerts.Firing | len }}', + num_resolved: '{{ .Alerts.Resolved | len }}', + }), +}; + +export const OpsgenieInitialConfig: Partial = { + message: '{{ .CommonLabels.alertname }}', + description: `{{ if gt (len .Alerts.Firing) 0 -}} + Alerts Firing: + {{ range .Alerts.Firing }} + - Message: {{ .Annotations.description }} + Labels: + {{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }} Annotations: + {{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }} Source: {{ .GeneratorURL }} + {{ end }} + {{- end }} + {{ if gt (len .Alerts.Resolved) 0 -}} + Alerts Resolved: + {{ 
range .Alerts.Resolved }} + - Message: {{ .Annotations.description }} + Labels: + {{ range .Labels.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }} Annotations: + {{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }} + {{ end }} Source: {{ .GeneratorURL }} + {{ end }} + {{- end }}`, + priority: + '{{ if eq (index .Alerts 0).Labels.severity "critical" }}P1{{ else if eq (index .Alerts 0).Labels.severity "warning" }}P2{{ else if eq (index .Alerts 0).Labels.severity "info" }}P3{{ else }}P4{{ end }}', +}; + +export const EmailInitialConfig: Partial = { + send_resolved: true, + html: ` + + + + + + {{ template "__subject" . }} + + + +
+ + + + + +
+
+ + + {{ if gt (len .Alerts.Firing) 0 }} + + + + + +
+ {{ else }} + + {{ end }} + {{ .Alerts | len }} alert{{ if gt (len .Alerts) 1 }}s{{ end }} for {{ range .GroupLabels.SortedPairs }} + {{ .Name }}={{ .Value }} + {{ end }} +
+ + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + {{ range .Alerts.Firing }} + + + + {{ end }} + {{ if gt (len .Alerts.Resolved) 0 }} + {{ if gt (len .Alerts.Firing) 0 }} + + + + {{ end }} + + + + {{ end }} + {{ range .Alerts.Resolved }} + + + + {{ end }} +
+ [{{ .Alerts.Firing | len }}] Firing +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+
+
+
+ [{{ .Alerts.Resolved | len }}] Resolved +
+ Labels
+ {{ range .Labels.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + {{ if gt (len .Annotations) 0 }}Annotations
{{ end }} + {{ range .Annotations.SortedPairs }}{{ .Name }} = {{ .Value }}
{{ end }} + Source
+
+
+
+
+ + `, +}; diff --git a/signoz/frontend/src/container/CreateAlertChannels/index.tsx b/signoz/frontend/src/container/CreateAlertChannels/index.tsx new file mode 100644 index 0000000..7345fa4 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertChannels/index.tsx @@ -0,0 +1,596 @@ +import { Form } from 'antd'; +import createEmail from 'api/channels/createEmail'; +import createMsTeamsApi from 'api/channels/createMsTeams'; +import createOpsgenie from 'api/channels/createOpsgenie'; +import createPagerApi from 'api/channels/createPager'; +import createSlackApi from 'api/channels/createSlack'; +import createWebhookApi from 'api/channels/createWebhook'; +import testEmail from 'api/channels/testEmail'; +import testMsTeamsApi from 'api/channels/testMsTeams'; +import testOpsGenie from 'api/channels/testOpsgenie'; +import testPagerApi from 'api/channels/testPager'; +import testSlackApi from 'api/channels/testSlack'; +import testWebhookApi from 'api/channels/testWebhook'; +import logEvent from 'api/common/logEvent'; +import ROUTES from 'constants/routes'; +import FormAlertChannels from 'container/FormAlertChannels'; +import { useNotifications } from 'hooks/useNotifications'; +import history from 'lib/history'; +import { useCallback, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; + +import { + ChannelType, + EmailChannel, + MsTeamsChannel, + OpsgenieChannel, + PagerChannel, + SlackChannel, + ValidatePagerChannel, + WebhookChannel, +} from './config'; +import { + EmailInitialConfig, + OpsgenieInitialConfig, + PagerInitialConfig, +} from './defaults'; +import { isChannelType } from './utils'; + +function CreateAlertChannels({ + preType = ChannelType.Slack, +}: CreateAlertChannelsProps): JSX.Element { + // init namespace for translations + const { t } = useTranslation('channels'); + + const [formInstance] = Form.useForm(); + + useEffect(() => { + logEvent('Alert Channel: Create channel page visited', {}); + }, []); + + const 
[selectedConfig, setSelectedConfig] = useState< + Partial< + SlackChannel & + WebhookChannel & + PagerChannel & + MsTeamsChannel & + OpsgenieChannel & + EmailChannel + > + >({ + send_resolved: true, + text: `{{ range .Alerts -}} + *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }} + + *Summary:* {{ .Annotations.summary }} + *Description:* {{ .Annotations.description }} + *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}} + *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}} + + *Details:* + {{ range .Labels.SortedPairs }} • *{{ .Name }}:* {{ .Value }} + {{ end }} + {{ end }}`, + title: `[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }} + {{- if gt (len .CommonLabels) (len .GroupLabels) -}} + {{" "}}( + {{- with .CommonLabels.Remove .GroupLabels.Names }} + {{- range $index, $label := .SortedPairs -}} + {{ if $index }}, {{ end }} + {{- $label.Name }}="{{ $label.Value -}}" + {{- end }} + {{- end -}} + ) + {{- end }}`, + }); + const [savingState, setSavingState] = useState(false); + const [testingState, setTestingState] = useState(false); + const { notifications } = useNotifications(); + + const [type, setType] = useState(preType); + const onTypeChangeHandler = useCallback( + (value: string) => { + const currentType = type; + setType(value as ChannelType); + + if (value === ChannelType.Pagerduty && currentType !== value) { + // reset config to pager defaults + setSelectedConfig({ + name: selectedConfig?.name, + send_resolved: selectedConfig.send_resolved, + ...PagerInitialConfig, + }); + } + + if (value === ChannelType.Opsgenie && currentType !== value) { + setSelectedConfig((selectedConfig) => ({ + ...selectedConfig, + ...OpsgenieInitialConfig, + })); + } + + // reset 
config to email defaults + if (value === ChannelType.Email && currentType !== value) { + setSelectedConfig((selectedConfig) => ({ + ...selectedConfig, + ...EmailInitialConfig, + })); + } + }, + [type, selectedConfig], + ); + + const prepareSlackRequest = useCallback( + () => ({ + api_url: selectedConfig?.api_url || '', + channel: selectedConfig?.channel || '', + name: selectedConfig?.name || '', + send_resolved: selectedConfig?.send_resolved || false, + text: selectedConfig?.text || '', + title: selectedConfig?.title || '', + }), + [selectedConfig], + ); + + const onSlackHandler = useCallback(async () => { + setSavingState(true); + + try { + const response = await createSlackApi(prepareSlackRequest()); + + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [prepareSlackRequest, t, notifications]); + + const prepareWebhookRequest = useCallback(() => { + // initial api request without auth params + let request: WebhookChannel = { + api_url: selectedConfig?.api_url || '', + name: selectedConfig?.name || '', + send_resolved: selectedConfig?.send_resolved || false, + }; + + if (selectedConfig?.username !== '' || selectedConfig?.password !== '') { + if (selectedConfig?.username !== '') { + // if username is not null then password must be passed + if (selectedConfig?.password !== '') { + request = { + ...request, + username: 
selectedConfig.username, + password: selectedConfig.password, + }; + } else { + notifications.error({ + message: 'Error', + description: t('username_no_password'), + }); + } + } else if (selectedConfig?.password !== '') { + // only password entered, set bearer token + request = { + ...request, + username: '', + password: selectedConfig.password, + }; + } + } + return request; + }, [notifications, t, selectedConfig]); + + const onWebhookHandler = useCallback(async () => { + setSavingState(true); + try { + const request = prepareWebhookRequest(); + const response = await createWebhookApi(request); + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [prepareWebhookRequest, t, notifications]); + + const preparePagerRequest = useCallback(() => { + const validationError = ValidatePagerChannel(selectedConfig as PagerChannel); + if (validationError !== '') { + notifications.error({ + message: 'Error', + description: validationError, + }); + return null; + } + + return { + name: selectedConfig?.name || '', + send_resolved: selectedConfig?.send_resolved || false, + routing_key: selectedConfig?.routing_key || '', + client: selectedConfig?.client || '', + client_url: selectedConfig?.client_url || '', + description: selectedConfig?.description || '', + severity: selectedConfig?.severity || '', + component: selectedConfig?.component || 
'', + group: selectedConfig?.group || '', + class: selectedConfig?.class || '', + details: selectedConfig.details || '', + detailsArray: JSON.parse(selectedConfig.details || '{}'), + }; + }, [selectedConfig, notifications]); + + const onPagerHandler = useCallback(async () => { + setSavingState(true); + const request = preparePagerRequest(); + + try { + if (request) { + const response = await createPagerApi(request); + + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [t, notifications, preparePagerRequest]); + + const prepareOpsgenieRequest = useCallback( + () => ({ + api_key: selectedConfig?.api_key || '', + name: selectedConfig?.name || '', + send_resolved: selectedConfig?.send_resolved || false, + description: selectedConfig?.description || '', + message: selectedConfig?.message || '', + priority: selectedConfig?.priority || '', + }), + [selectedConfig], + ); + + const onOpsgenieHandler = useCallback(async () => { + setSavingState(true); + + try { + const response = await createOpsgenie(prepareOpsgenieRequest()); + + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); 
+ history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [prepareOpsgenieRequest, t, notifications]); + + const prepareEmailRequest = useCallback( + () => ({ + name: selectedConfig?.name || '', + send_resolved: selectedConfig?.send_resolved || false, + to: selectedConfig?.to || '', + html: selectedConfig?.html || '', + headers: selectedConfig?.headers || {}, + }), + [selectedConfig], + ); + + const onEmailHandler = useCallback(async () => { + setSavingState(true); + try { + const request = prepareEmailRequest(); + const response = await createEmail(request); + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [prepareEmailRequest, t, notifications]); + + const prepareMsTeamsRequest = useCallback( + () => ({ + webhook_url: selectedConfig?.webhook_url || '', + name: selectedConfig?.name || '', + send_resolved: 
selectedConfig?.send_resolved || false, + text: selectedConfig?.text || '', + title: selectedConfig?.title || '', + }), + [selectedConfig], + ); + + const onMsTeamsHandler = useCallback(async () => { + setSavingState(true); + + try { + const response = await createMsTeamsApi(prepareMsTeamsRequest()); + + if (response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_creation_done'), + }); + history.replace(ROUTES.ALL_CHANNELS); + return { status: 'success', statusMessage: t('channel_creation_done') }; + } + notifications.error({ + message: 'Error', + description: response.error || t('channel_creation_failed'), + }); + return { + status: 'failed', + statusMessage: response.error || t('channel_creation_failed'), + }; + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_creation_failed'), + }); + return { status: 'failed', statusMessage: t('channel_creation_failed') }; + } finally { + setSavingState(false); + } + }, [prepareMsTeamsRequest, t, notifications]); + + const onSaveHandler = useCallback( + async (value: ChannelType) => { + const functionMapper = { + [ChannelType.Slack]: onSlackHandler, + [ChannelType.Webhook]: onWebhookHandler, + [ChannelType.Pagerduty]: onPagerHandler, + [ChannelType.Opsgenie]: onOpsgenieHandler, + [ChannelType.MsTeams]: onMsTeamsHandler, + [ChannelType.Email]: onEmailHandler, + }; + + if (isChannelType(value)) { + const functionToCall = functionMapper[value as keyof typeof functionMapper]; + + if (functionToCall) { + const result = await functionToCall(); + logEvent('Alert Channel: Save channel', { + type: value, + sendResolvedAlert: selectedConfig?.send_resolved, + name: selectedConfig?.name, + new: 'true', + status: result?.status, + statusMessage: result?.statusMessage, + }); + } else { + notifications.error({ + message: 'Error', + description: t('selected_channel_invalid'), + }); + } + } + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [ 
+ onSlackHandler, + onWebhookHandler, + onPagerHandler, + onOpsgenieHandler, + onMsTeamsHandler, + onEmailHandler, + notifications, + t, + ], + ); + + const performChannelTest = useCallback( + async (channelType: ChannelType) => { + setTestingState(true); + try { + let request; + let response; + switch (channelType) { + case ChannelType.Webhook: + request = prepareWebhookRequest(); + response = await testWebhookApi(request); + break; + case ChannelType.Slack: + request = prepareSlackRequest(); + response = await testSlackApi(request); + break; + case ChannelType.Pagerduty: + request = preparePagerRequest(); + if (request) response = await testPagerApi(request); + break; + case ChannelType.MsTeams: + request = prepareMsTeamsRequest(); + response = await testMsTeamsApi(request); + break; + case ChannelType.Opsgenie: + request = prepareOpsgenieRequest(); + response = await testOpsGenie(request); + break; + case ChannelType.Email: + request = prepareEmailRequest(); + response = await testEmail(request); + break; + default: + notifications.error({ + message: 'Error', + description: t('test_unsupported'), + }); + setTestingState(false); + return; + } + + if (response && response.statusCode === 200) { + notifications.success({ + message: 'Success', + description: t('channel_test_done'), + }); + } else { + notifications.error({ + message: 'Error', + description: t('channel_test_failed'), + }); + } + + logEvent('Alert Channel: Test notification', { + type: channelType, + sendResolvedAlert: selectedConfig?.send_resolved, + name: selectedConfig?.name, + new: 'true', + status: + response && response.statusCode === 200 ? 
'Test success' : 'Test failed', + }); + } catch (error) { + notifications.error({ + message: 'Error', + description: t('channel_test_unexpected'), + }); + } + + setTestingState(false); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [ + prepareWebhookRequest, + t, + preparePagerRequest, + prepareOpsgenieRequest, + prepareSlackRequest, + prepareMsTeamsRequest, + prepareEmailRequest, + notifications, + ], + ); + + const onTestHandler = useCallback( + async (value: ChannelType) => { + performChannelTest(value); + }, + [performChannelTest], + ); + + return ( + + ); +} + +interface CreateAlertChannelsProps { + preType: ChannelType; +} + +export default CreateAlertChannels; diff --git a/signoz/frontend/src/container/CreateAlertChannels/utils.ts b/signoz/frontend/src/container/CreateAlertChannels/utils.ts new file mode 100644 index 0000000..ce7520a --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertChannels/utils.ts @@ -0,0 +1,4 @@ +import { ChannelType } from './config'; + +export const isChannelType = (type: string): type is ChannelType => + Object.values(ChannelType).includes(type as ChannelType); diff --git a/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/config.ts b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/config.ts new file mode 100644 index 0000000..c973684 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/config.ts @@ -0,0 +1,27 @@ +import { TFunction } from 'i18next'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +import { OptionType } from './types'; + +export const getOptionList = (t: TFunction): OptionType[] => [ + { + title: t('metric_based_alert'), + selection: AlertTypes.METRICS_BASED_ALERT, + description: t('metric_based_alert_desc'), + }, + { + title: t('log_based_alert'), + selection: AlertTypes.LOGS_BASED_ALERT, + description: t('log_based_alert_desc'), + }, + { + title: t('traces_based_alert'), + selection: AlertTypes.TRACES_BASED_ALERT, + 
description: t('traces_based_alert_desc'), + }, + { + title: t('exceptions_based_alert'), + selection: AlertTypes.EXCEPTIONS_BASED_ALERT, + description: t('exceptions_based_alert_desc'), + }, +]; diff --git a/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx new file mode 100644 index 0000000..52f4d52 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/index.tsx @@ -0,0 +1,95 @@ +import { Row, Typography } from 'antd'; +import logEvent from 'api/common/logEvent'; +import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts'; +import { useMemo } from 'react'; +import { useTranslation } from 'react-i18next'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +import { getOptionList } from './config'; +import { AlertTypeCard, SelectTypeContainer } from './styles'; +import { OptionType } from './types'; + +function SelectAlertType({ onSelect }: SelectAlertTypeProps): JSX.Element { + const { t } = useTranslation(['alerts']); + + const optionList = getOptionList(t); + + function handleRedirection(option: AlertTypes): void { + let url = ''; + switch (option) { + case AlertTypes.METRICS_BASED_ALERT: + url = + 'https://signoz.io/docs/alerts-management/metrics-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples'; + break; + case AlertTypes.LOGS_BASED_ALERT: + url = + 'https://signoz.io/docs/alerts-management/log-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples'; + break; + case AlertTypes.TRACES_BASED_ALERT: + url = + 'https://signoz.io/docs/alerts-management/trace-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples'; + break; + case AlertTypes.EXCEPTIONS_BASED_ALERT: + url = + 'https://signoz.io/docs/alerts-management/exceptions-based-alerts/?utm_source=product&utm_medium=alert-source-selection-page#examples'; + break; + default: + break; + } + + 
logEvent('Alert: Sample alert link clicked', { + dataSource: ALERTS_DATA_SOURCE_MAP[option], + link: url, + page: 'New alert data source selection page', + }); + + window.open(url, '_blank'); + } + const renderOptions = useMemo( + () => ( + <> + {optionList.map((option: OptionType) => ( + { + onSelect(option.selection); + }} + > + {option.description}{' '} + { + e.preventDefault(); + e.stopPropagation(); + handleRedirection(option.selection); + }} + > + Click here to see how to create a sample alert. + {' '} + + ))} + + ), + [onSelect, optionList], + ); + + return ( + + + {t('choose_alert_type')} + + {renderOptions} + + ); +} + +interface SelectAlertTypeProps { + onSelect: (typ: AlertTypes) => void; +} + +export default SelectAlertType; diff --git a/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts new file mode 100644 index 0000000..9c3323a --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/styles.ts @@ -0,0 +1,16 @@ +import { Card } from 'antd'; +import styled from 'styled-components'; + +export const SelectTypeContainer = styled.div` + &&& { + padding: 1rem; + } +`; + +export const AlertTypeCard = styled(Card)` + &&& { + margin: 5px; + width: 21rem; + cursor: pointer; + } +`; diff --git a/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/types.ts b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/types.ts new file mode 100644 index 0000000..670f5a2 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/SelectAlertType/types.ts @@ -0,0 +1,7 @@ +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +export interface OptionType { + title: string; + selection: AlertTypes; + description: string; +} diff --git a/signoz/frontend/src/container/CreateAlertRule/config.ts b/signoz/frontend/src/container/CreateAlertRule/config.ts new file mode 100644 index 0000000..fe52bb1 --- /dev/null +++ 
b/signoz/frontend/src/container/CreateAlertRule/config.ts @@ -0,0 +1,8 @@ +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { DataSource } from 'types/common/queryBuilder'; + +export const ALERT_TYPE_VS_SOURCE_MAPPING = { + [DataSource.LOGS]: AlertTypes.LOGS_BASED_ALERT, + [DataSource.METRICS]: AlertTypes.METRICS_BASED_ALERT, + [DataSource.TRACES]: AlertTypes.TRACES_BASED_ALERT, +}; diff --git a/signoz/frontend/src/container/CreateAlertRule/defaults.ts b/signoz/frontend/src/container/CreateAlertRule/defaults.ts new file mode 100644 index 0000000..20cd020 --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/defaults.ts @@ -0,0 +1,152 @@ +import { ENTITY_VERSION_V4 } from 'constants/app'; +import { + initialQueryBuilderFormValuesMap, + initialQueryPromQLData, + PANEL_TYPES, +} from 'constants/queryBuilder'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { + AlertDef, + defaultCompareOp, + defaultEvalWindow, + defaultMatchType, +} from 'types/api/alerts/def'; +import { EQueryType } from 'types/common/dashboard'; + +const defaultAlertDescription = + 'This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})'; +const defaultAlertSummary = + 'The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}'; + +const defaultAnnotations = { + description: defaultAlertDescription, + summary: defaultAlertSummary, +}; + +export const alertDefaults: AlertDef = { + alertType: AlertTypes.METRICS_BASED_ALERT, + version: ENTITY_VERSION_V4, + condition: { + compositeQuery: { + builderQueries: { + A: initialQueryBuilderFormValuesMap.metrics, + }, + promQueries: { A: initialQueryPromQLData }, + chQueries: { + A: { + name: 'A', + query: ``, + legend: '', + disabled: false, + }, + }, + queryType: EQueryType.QUERY_BUILDER, + panelType: PANEL_TYPES.TIME_SERIES, + unit: undefined, + }, + op: defaultCompareOp, + matchType: defaultMatchType, + }, + labels: { + 
severity: 'warning', + }, + annotations: defaultAnnotations, + evalWindow: defaultEvalWindow, +}; + +export const logAlertDefaults: AlertDef = { + alertType: AlertTypes.LOGS_BASED_ALERT, + condition: { + compositeQuery: { + builderQueries: { + A: initialQueryBuilderFormValuesMap.logs, + }, + promQueries: { A: initialQueryPromQLData }, + chQueries: { + A: { + name: 'A', + query: `select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}} \nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: EQueryType.QUERY_BUILDER, + panelType: PANEL_TYPES.TIME_SERIES, + unit: undefined, + }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + }, + annotations: defaultAnnotations, + evalWindow: defaultEvalWindow, +}; + +export const traceAlertDefaults: AlertDef = { + alertType: AlertTypes.TRACES_BASED_ALERT, + condition: { + compositeQuery: { + builderQueries: { + A: initialQueryBuilderFormValuesMap.traces, + }, + promQueries: { A: initialQueryPromQLData }, + chQueries: { + A: { + name: 'A', + query: `SELECT \n\ttoStartOfInterval(timestamp, INTERVAL 1 MINUTE) AS interval, \n\ttagMap['peer.service'] AS op_name, \n\ttoFloat64(avg(durationNano)) AS value \nFROM signoz_traces.distributed_signoz_index_v2 \nWHERE tagMap['peer.service']!='' \nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}} \nGROUP BY (op_name, interval);\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: EQueryType.QUERY_BUILDER, + panelType: PANEL_TYPES.TIME_SERIES, + unit: undefined, 
+ }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + }, + annotations: defaultAnnotations, + evalWindow: defaultEvalWindow, +}; + +export const exceptionAlertDefaults: AlertDef = { + alertType: AlertTypes.EXCEPTIONS_BASED_ALERT, + condition: { + compositeQuery: { + builderQueries: { + A: initialQueryBuilderFormValuesMap.traces, + }, + promQueries: { A: initialQueryPromQLData }, + chQueries: { + A: { + name: 'A', + query: `SELECT \n\tcount() as value,\n\ttoStartOfInterval(timestamp, toIntervalMinute(1)) AS interval,\n\tserviceName\nFROM signoz_traces.distributed_signoz_error_index_v2\nWHERE exceptionType !='OSError'\nAND timestamp BETWEEN {{.start_datetime}} AND {{.end_datetime}}\nGROUP BY serviceName, interval;\n\n-- available variables:\n-- \t{{.start_datetime}}\n-- \t{{.end_datetime}}\n\n-- required column alias:\n-- \tvalue\n-- \tinterval`, + legend: '', + disabled: false, + }, + }, + queryType: EQueryType.CLICKHOUSE, + panelType: PANEL_TYPES.TIME_SERIES, + unit: undefined, + }, + op: defaultCompareOp, + matchType: '4', + }, + labels: { + severity: 'warning', + }, + annotations: defaultAnnotations, + evalWindow: defaultEvalWindow, +}; + +export const ALERTS_VALUES_MAP: Record = { + [AlertTypes.METRICS_BASED_ALERT]: alertDefaults, + [AlertTypes.LOGS_BASED_ALERT]: logAlertDefaults, + [AlertTypes.TRACES_BASED_ALERT]: traceAlertDefaults, + [AlertTypes.EXCEPTIONS_BASED_ALERT]: exceptionAlertDefaults, +}; diff --git a/signoz/frontend/src/container/CreateAlertRule/index.tsx b/signoz/frontend/src/container/CreateAlertRule/index.tsx new file mode 100644 index 0000000..f7e491c --- /dev/null +++ b/signoz/frontend/src/container/CreateAlertRule/index.tsx @@ -0,0 +1,96 @@ +import { Form, Row } from 'antd'; +import logEvent from 'api/common/logEvent'; +import { ENTITY_VERSION_V4 } from 'constants/app'; +import { QueryParams } from 'constants/query'; +import FormAlertRules from 'container/FormAlertRules'; +import { useGetCompositeQueryParam } 
from 'hooks/queryBuilder/useGetCompositeQueryParam'; +import history from 'lib/history'; +import { useEffect, useState } from 'react'; +import { useLocation } from 'react-router-dom'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { AlertDef } from 'types/api/alerts/def'; + +import { ALERT_TYPE_VS_SOURCE_MAPPING } from './config'; +import { + alertDefaults, + exceptionAlertDefaults, + logAlertDefaults, + traceAlertDefaults, +} from './defaults'; +import SelectAlertType from './SelectAlertType'; + +function CreateRules(): JSX.Element { + const [initValues, setInitValues] = useState(null); + + const location = useLocation(); + const queryParams = new URLSearchParams(location.search); + const version = queryParams.get('version'); + const alertTypeFromParams = queryParams.get(QueryParams.alertType); + + const compositeQuery = useGetCompositeQueryParam(); + function getAlertTypeFromDataSource(): AlertTypes | null { + if (!compositeQuery) { + return null; + } + const dataSource = compositeQuery?.builder?.queryData[0]?.dataSource; + + return ALERT_TYPE_VS_SOURCE_MAPPING[dataSource]; + } + + const [alertType, setAlertType] = useState( + (alertTypeFromParams as AlertTypes) || getAlertTypeFromDataSource(), + ); + + const [formInstance] = Form.useForm(); + + const onSelectType = (typ: AlertTypes): void => { + setAlertType(typ); + switch (typ) { + case AlertTypes.LOGS_BASED_ALERT: + setInitValues(logAlertDefaults); + break; + case AlertTypes.TRACES_BASED_ALERT: + setInitValues(traceAlertDefaults); + break; + case AlertTypes.EXCEPTIONS_BASED_ALERT: + setInitValues(exceptionAlertDefaults); + break; + default: + setInitValues({ + ...alertDefaults, + version: version || ENTITY_VERSION_V4, + }); + } + queryParams.set(QueryParams.alertType, typ); + const generatedUrl = `${location.pathname}?${queryParams.toString()}`; + history.replace(generatedUrl); + }; + + useEffect(() => { + if (alertType) { + onSelectType(alertType); + } else { + logEvent('Alert: New alert 
data source selection page visited', {}); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [alertType]); + + if (!initValues) { + return ( + + + + ); + } + + return ( + + ); +} + +export default CreateRules; diff --git a/signoz/frontend/src/container/Download/Download.styles.scss b/signoz/frontend/src/container/Download/Download.styles.scss new file mode 100644 index 0000000..9352530 --- /dev/null +++ b/signoz/frontend/src/container/Download/Download.styles.scss @@ -0,0 +1,4 @@ +.download-button { + display: flex; + align-items: center; +} \ No newline at end of file diff --git a/signoz/frontend/src/container/Download/Download.tsx b/signoz/frontend/src/container/Download/Download.tsx new file mode 100644 index 0000000..666b015 --- /dev/null +++ b/signoz/frontend/src/container/Download/Download.tsx @@ -0,0 +1,77 @@ +import './Download.styles.scss'; + +import { CloudDownloadOutlined } from '@ant-design/icons'; +import { Button, Dropdown, MenuProps } from 'antd'; +import { Excel } from 'antd-table-saveas-excel'; +import { unparse } from 'papaparse'; + +import { DownloadProps } from './Download.types'; + +function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element { + const downloadExcelFile = (): void => { + const headers = Object.keys(Object.assign({}, ...data)).map((item) => { + const updatedTitle = item + .split('_') + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); + return { + title: updatedTitle, + dataIndex: item, + }; + }); + const excel = new Excel(); + excel + .addSheet(fileName) + .addColumns(headers) + .addDataSource(data, { + str2Percent: true, + }) + .saveAs(`${fileName}.xlsx`); + }; + + const downloadCsvFile = (): void => { + const csv = unparse(data); + const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' }); + const csvUrl = URL.createObjectURL(csvBlob); + const downloadLink = document.createElement('a'); + downloadLink.href = csvUrl; + downloadLink.download = 
`${fileName}.csv`; + downloadLink.click(); + downloadLink.remove(); + }; + + const menu: MenuProps = { + items: [ + { + key: 'download-as-excel', + label: 'Excel', + onClick: downloadExcelFile, + }, + { + key: 'download-as-csv', + label: 'CSV', + onClick: downloadCsvFile, + }, + ], + }; + + return ( + + + + ); +} + +Download.defaultProps = { + isLoading: undefined, +}; + +export default Download; diff --git a/signoz/frontend/src/container/Download/Download.types.ts b/signoz/frontend/src/container/Download/Download.types.ts new file mode 100644 index 0000000..0757ed6 --- /dev/null +++ b/signoz/frontend/src/container/Download/Download.types.ts @@ -0,0 +1,10 @@ +export type DownloadOptions = { + isDownloadEnabled: boolean; + fileName: string; +}; + +export type DownloadProps = { + data: Record[]; + isLoading?: boolean; + fileName: string; +}; diff --git a/signoz/frontend/src/container/DownloadV2/DownloadV2.styles.scss b/signoz/frontend/src/container/DownloadV2/DownloadV2.styles.scss new file mode 100644 index 0000000..850c1c7 --- /dev/null +++ b/signoz/frontend/src/container/DownloadV2/DownloadV2.styles.scss @@ -0,0 +1,84 @@ +.download-logs-popover { + .ant-popover-inner { + border-radius: 4px; + border: 1px solid var(--bg-slate-400); + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.8) 0%, + rgba(18, 19, 23, 0.9) 98.68% + ); + box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2); + backdrop-filter: blur(20px); + padding: 12px 18px 12px 14px; + + .download-logs-content { + display: flex; + flex-direction: column; + gap: 8px; + align-items: flex-start; + + .action-btns { + padding: 4px 0px !important; + width: 159px; + display: flex; + align-items: center; + color: var(--bg-vanilla-400); + font-size: 14px; + font-style: normal; + font-weight: 400; + line-height: normal; + letter-spacing: 0.14px; + gap: 6px; + + .ant-btn-icon { + margin-inline-end: 0px; + } + } + + .action-btns:hover { + &.ant-btn-text { + background-color: rgba(171, 189, 255, 0.04) !important; 
+ } + } + + .export-heading { + color: #52575c; + font-size: 11px; + font-style: normal; + font-weight: 600; + line-height: 18px; /* 163.636% */ + letter-spacing: 0.88px; + text-transform: uppercase; + } + } + } +} + +.lightMode { + .download-logs-popover { + .ant-popover-inner { + border: 1px solid var(--bg-vanilla-300); + background: linear-gradient( + 139deg, + rgba(255, 255, 255, 0.8) 0%, + rgba(255, 255, 255, 0.9) 98.68% + ); + + box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2); + + .download-logs-content { + .action-btns { + color: var(--bg-ink-400); + } + .action-btns:hover { + &.ant-btn-text { + background-color: var(--bg-vanilla-300) !important; + } + } + .export-heading { + color: var(--bg-ink-200); + } + } + } + } +} diff --git a/signoz/frontend/src/container/DownloadV2/DownloadV2.tsx b/signoz/frontend/src/container/DownloadV2/DownloadV2.tsx new file mode 100644 index 0000000..95630ef --- /dev/null +++ b/signoz/frontend/src/container/DownloadV2/DownloadV2.tsx @@ -0,0 +1,84 @@ +import './DownloadV2.styles.scss'; + +import { Button, Popover, Typography } from 'antd'; +import { Excel } from 'antd-table-saveas-excel'; +import { FileDigit, FileDown, Sheet } from 'lucide-react'; +import { unparse } from 'papaparse'; + +import { DownloadProps } from './DownloadV2.types'; + +function Download({ data, isLoading, fileName }: DownloadProps): JSX.Element { + const downloadExcelFile = (): void => { + const headers = Object.keys(Object.assign({}, ...data)).map((item) => { + const updatedTitle = item + .split('_') + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' '); + return { + title: updatedTitle, + dataIndex: item, + }; + }); + const excel = new Excel(); + excel + .addSheet(fileName) + .addColumns(headers) + .addDataSource(data, { + str2Percent: true, + }) + .saveAs(`${fileName}.xlsx`); + }; + + const downloadCsvFile = (): void => { + const csv = unparse(data); + const csvBlob = new Blob([csv], { type: 'text/csv;charset=utf-8;' }); + 
const csvUrl = URL.createObjectURL(csvBlob); + const downloadLink = document.createElement('a'); + downloadLink.href = csvUrl; + downloadLink.download = `${fileName}.csv`; + downloadLink.click(); + downloadLink.remove(); + }; + + return ( + + Export As + + + + } + > + + + + + + + + {t('see_trace_graph')} + + + + {t('stack_trace')} +
+ +
+ + + + + + + + ); +} + +interface ErrorDetailsProps { + idPayload: GetByErrorTypeAndServicePayload; +} + +export default ErrorDetails; diff --git a/signoz/frontend/src/container/ErrorDetails/styles.scss b/signoz/frontend/src/container/ErrorDetails/styles.scss new file mode 100644 index 0000000..31194b4 --- /dev/null +++ b/signoz/frontend/src/container/ErrorDetails/styles.scss @@ -0,0 +1,3 @@ +.error-container { + height: 50vh; +} diff --git a/signoz/frontend/src/container/ErrorDetails/styles.ts b/signoz/frontend/src/container/ErrorDetails/styles.ts new file mode 100644 index 0000000..d1cd032 --- /dev/null +++ b/signoz/frontend/src/container/ErrorDetails/styles.ts @@ -0,0 +1,28 @@ +import { grey } from '@ant-design/colors'; +import styled from 'styled-components'; + +export const DashedContainer = styled.div` + border: ${`1px dashed ${grey[0]}`}; + box-sizing: border-box; + border-radius: 0.25rem; + display: flex; + justify-content: space-between; + padding: 1rem; + margin-top: 1.875rem; + margin-bottom: 1.625rem; + align-items: center; +`; + +export const ButtonContainer = styled.div` + display: flex; + gap: 1rem; +`; + +export const EventContainer = styled.div` + display: flex; + justify-content: space-between; +`; + +export const EditorContainer = styled.div` + margin-top: 1.5rem; +`; diff --git a/signoz/frontend/src/container/ExplorerControlPanel/ExplorerControlPanel.interfaces.ts b/signoz/frontend/src/container/ExplorerControlPanel/ExplorerControlPanel.interfaces.ts new file mode 100644 index 0000000..b115897 --- /dev/null +++ b/signoz/frontend/src/container/ExplorerControlPanel/ExplorerControlPanel.interfaces.ts @@ -0,0 +1,8 @@ +import { OptionsMenuConfig } from 'container/OptionsMenu/types'; + +export type ExplorerControlPanelProps = { + selectedOptionFormat: string; + isShowPageSize: boolean; + isLoading: boolean; + optionsMenuConfig?: OptionsMenuConfig; +}; diff --git a/signoz/frontend/src/container/ExplorerControlPanel/index.tsx 
b/signoz/frontend/src/container/ExplorerControlPanel/index.tsx new file mode 100644 index 0000000..d0823ef --- /dev/null +++ b/signoz/frontend/src/container/ExplorerControlPanel/index.tsx @@ -0,0 +1,33 @@ +import { Col, Row } from 'antd'; +import OptionsMenu from 'container/OptionsMenu'; +import PageSizeSelect from 'container/PageSizeSelect'; + +import { ExplorerControlPanelProps } from './ExplorerControlPanel.interfaces'; +import { ContainerStyled } from './styles'; + +function ExplorerControlPanel({ + selectedOptionFormat, + isLoading, + isShowPageSize, + optionsMenuConfig, +}: ExplorerControlPanelProps): JSX.Element { + return ( + + + {optionsMenuConfig && ( +