From e7791a36a36483614c9d83823f64fa05bf5e9faf Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 14:35:40 +0100 Subject: [PATCH 01/60] feat(assist): refactored --- .github/workflows/assist-ee.yaml | 10 +++++----- .github/workflows/assist.yaml | 8 ++++---- {utilities => assist}/.dockerignore | 0 {utilities => assist}/.gitignore | 0 {utilities => assist}/Dockerfile | 0 {utilities => assist}/build.sh | 10 +++++----- {utilities => assist}/package-lock.json | 0 {utilities => assist}/package.json | 0 {ee/utilities => assist}/run-dev.sh | 0 {utilities => assist}/server.js | 0 {utilities => assist}/servers/websocket.js | 0 {utilities => assist}/utils/HeapSnapshot.js | 0 {utilities => assist}/utils/assistHelper.js | 0 {utilities => assist}/utils/geoIP.js | 0 {utilities => assist}/utils/helper.js | 0 ee/{utilities => assist}/.gitignore | 0 ee/{utilities => assist}/Dockerfile | 0 ee/{utilities => assist}/clean-dev.sh | 0 ee/{utilities => assist}/package-lock.json | 0 ee/{utilities => assist}/package.json | 0 ee/{utilities => assist}/prepare-dev.sh | 2 +- {utilities => ee/assist}/run-dev.sh | 0 ee/{utilities => assist}/server.js | 0 ee/{utilities => assist}/servers/websocket-cluster.js | 0 ee/{utilities => assist}/servers/websocket.js | 0 ee/{utilities => assist}/utils/helper-ee.js | 0 peers/build.sh | 2 +- sourcemap-reader/build.sh | 2 +- sourcemap-reader/prepare-dev.sh | 2 +- 29 files changed, 18 insertions(+), 18 deletions(-) rename {utilities => assist}/.dockerignore (100%) rename {utilities => assist}/.gitignore (100%) rename {utilities => assist}/Dockerfile (100%) rename {utilities => assist}/build.sh (86%) rename {utilities => assist}/package-lock.json (100%) rename {utilities => assist}/package.json (100%) rename {ee/utilities => assist}/run-dev.sh (100%) rename {utilities => assist}/server.js (100%) rename {utilities => assist}/servers/websocket.js (100%) rename {utilities => assist}/utils/HeapSnapshot.js (100%) rename {utilities => 
assist}/utils/assistHelper.js (100%) rename {utilities => assist}/utils/geoIP.js (100%) rename {utilities => assist}/utils/helper.js (100%) rename ee/{utilities => assist}/.gitignore (100%) rename ee/{utilities => assist}/Dockerfile (100%) rename ee/{utilities => assist}/clean-dev.sh (100%) rename ee/{utilities => assist}/package-lock.json (100%) rename ee/{utilities => assist}/package.json (100%) rename ee/{utilities => assist}/prepare-dev.sh (75%) rename {utilities => ee/assist}/run-dev.sh (100%) rename ee/{utilities => assist}/server.js (100%) rename ee/{utilities => assist}/servers/websocket-cluster.js (100%) rename ee/{utilities => assist}/servers/websocket.js (100%) rename ee/{utilities => assist}/utils/helper-ee.js (100%) diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index e3f03ef5f..f2fa32ba2 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -6,10 +6,10 @@ on: - dev - api-* paths: - - "ee/utilities/**" - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "ee/assist/**" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist EE @@ -44,7 +44,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh ee [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index 03ee1df5f..67bfed543 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -6,9 +6,9 @@ on: - dev - api-* paths: - - "utilities/**" - - "!utilities/.gitignore" - - "!utilities/*-dev.sh" + - "assist/**" + - "!assist/.gitignore" + - "!assist/*-dev.sh" name: Build and Deploy Assist @@ -43,7 +43,7 @@ jobs: ENVIRONMENT: staging run: | 
skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd utilities + cd assist PUSH_IMAGE=0 bash -x ./build.sh [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ diff --git a/utilities/.dockerignore b/assist/.dockerignore similarity index 100% rename from utilities/.dockerignore rename to assist/.dockerignore diff --git a/utilities/.gitignore b/assist/.gitignore similarity index 100% rename from utilities/.gitignore rename to assist/.gitignore diff --git a/utilities/Dockerfile b/assist/Dockerfile similarity index 100% rename from utilities/Dockerfile rename to assist/Dockerfile diff --git a/utilities/build.sh b/assist/build.sh similarity index 86% rename from utilities/build.sh rename to assist/build.sh index 87ff7f3e6..7a780cc43 100644 --- a/utilities/build.sh +++ b/assist/build.sh @@ -16,20 +16,20 @@ check_prereq() { } function build_api(){ - destination="_utilities" + destination="_assist" [[ $1 == "ee" ]] && { - destination="_utilities_ee" + destination="_assist_ee" } - cp -R ../utilities ../${destination} + cp -R ../assist ../${destination} cd ../${destination} # Copy enterprise code [[ $1 == "ee" ]] && { - cp -rf ../ee/utilities/* ./ + cp -rf ../ee/assist/* ./ } docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist:${image_tag} . 
- cd ../utilities + cd ../assist rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { docker push ${DOCKER_REPO:-'local'}/assist:${image_tag} diff --git a/utilities/package-lock.json b/assist/package-lock.json similarity index 100% rename from utilities/package-lock.json rename to assist/package-lock.json diff --git a/utilities/package.json b/assist/package.json similarity index 100% rename from utilities/package.json rename to assist/package.json diff --git a/ee/utilities/run-dev.sh b/assist/run-dev.sh similarity index 100% rename from ee/utilities/run-dev.sh rename to assist/run-dev.sh diff --git a/utilities/server.js b/assist/server.js similarity index 100% rename from utilities/server.js rename to assist/server.js diff --git a/utilities/servers/websocket.js b/assist/servers/websocket.js similarity index 100% rename from utilities/servers/websocket.js rename to assist/servers/websocket.js diff --git a/utilities/utils/HeapSnapshot.js b/assist/utils/HeapSnapshot.js similarity index 100% rename from utilities/utils/HeapSnapshot.js rename to assist/utils/HeapSnapshot.js diff --git a/utilities/utils/assistHelper.js b/assist/utils/assistHelper.js similarity index 100% rename from utilities/utils/assistHelper.js rename to assist/utils/assistHelper.js diff --git a/utilities/utils/geoIP.js b/assist/utils/geoIP.js similarity index 100% rename from utilities/utils/geoIP.js rename to assist/utils/geoIP.js diff --git a/utilities/utils/helper.js b/assist/utils/helper.js similarity index 100% rename from utilities/utils/helper.js rename to assist/utils/helper.js diff --git a/ee/utilities/.gitignore b/ee/assist/.gitignore similarity index 100% rename from ee/utilities/.gitignore rename to ee/assist/.gitignore diff --git a/ee/utilities/Dockerfile b/ee/assist/Dockerfile similarity index 100% rename from ee/utilities/Dockerfile rename to ee/assist/Dockerfile diff --git a/ee/utilities/clean-dev.sh b/ee/assist/clean-dev.sh similarity index 100% rename from ee/utilities/clean-dev.sh 
rename to ee/assist/clean-dev.sh diff --git a/ee/utilities/package-lock.json b/ee/assist/package-lock.json similarity index 100% rename from ee/utilities/package-lock.json rename to ee/assist/package-lock.json diff --git a/ee/utilities/package.json b/ee/assist/package.json similarity index 100% rename from ee/utilities/package.json rename to ee/assist/package.json diff --git a/ee/utilities/prepare-dev.sh b/ee/assist/prepare-dev.sh similarity index 75% rename from ee/utilities/prepare-dev.sh rename to ee/assist/prepare-dev.sh index 2daecbfc1..8da98eac3 100755 --- a/ee/utilities/prepare-dev.sh +++ b/ee/assist/prepare-dev.sh @@ -1,2 +1,2 @@ #!/bin/bash -rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../utilities/* ./ \ No newline at end of file +rsync -avr --exclude=".*" --exclude="node_modules" --ignore-existing ../../assist/* ./ \ No newline at end of file diff --git a/utilities/run-dev.sh b/ee/assist/run-dev.sh similarity index 100% rename from utilities/run-dev.sh rename to ee/assist/run-dev.sh diff --git a/ee/utilities/server.js b/ee/assist/server.js similarity index 100% rename from ee/utilities/server.js rename to ee/assist/server.js diff --git a/ee/utilities/servers/websocket-cluster.js b/ee/assist/servers/websocket-cluster.js similarity index 100% rename from ee/utilities/servers/websocket-cluster.js rename to ee/assist/servers/websocket-cluster.js diff --git a/ee/utilities/servers/websocket.js b/ee/assist/servers/websocket.js similarity index 100% rename from ee/utilities/servers/websocket.js rename to ee/assist/servers/websocket.js diff --git a/ee/utilities/utils/helper-ee.js b/ee/assist/utils/helper-ee.js similarity index 100% rename from ee/utilities/utils/helper-ee.js rename to ee/assist/utils/helper-ee.js diff --git a/peers/build.sh b/peers/build.sh index 45cc97892..746a12f9d 100644 --- a/peers/build.sh +++ b/peers/build.sh @@ -22,7 +22,7 @@ function build_api(){ } cp -R ../peers ../${destination} cd ../${destination} - cp -R 
../utilities/utils . + cp -R ../assist/utils . # Copy enterprise code [[ $1 == "ee" ]] && { cp -rf ../ee/peers/* ./ diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index fbe8762e2..3025e05e4 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -30,7 +30,7 @@ function build_api(){ } cp -R ../sourcemap-reader ../${destination} cd ../${destination} - cp -R ../utilities/utils . + cp -R ../assist/utils . tag="" # Copy enterprise code [[ $1 == "ee" ]] && { diff --git a/sourcemap-reader/prepare-dev.sh b/sourcemap-reader/prepare-dev.sh index e057555db..78a315946 100755 --- a/sourcemap-reader/prepare-dev.sh +++ b/sourcemap-reader/prepare-dev.sh @@ -1,2 +1,2 @@ #!/bin/bash -rsync -avr --exclude=".*" --ignore-existing ../utilities/utils ./ \ No newline at end of file +rsync -avr --exclude=".*" --ignore-existing ../assist/utils ./ \ No newline at end of file From 2edb88f1b30c9840b47e7a92b33a9bb059c6be8d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 14:37:23 +0100 Subject: [PATCH 02/60] feat(DB): changed GDPR default values feat(DB): heuristics delta --- .../db/init_dbs/postgresql/1.11.0/1.11.0.sql | 24 +++++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 2 +- .../db/init_dbs/postgresql/1.11.0/1.11.0.sql | 20 ++++++++++++++++ .../db/init_dbs/postgresql/init_schema.sql | 2 +- 4 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql create mode 100644 scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql new file mode 100644 index 000000000..30bb27997 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -0,0 +1,24 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.11.0-ee' +$$ LANGUAGE sql IMMUTABLE; + + + +ALTER TABLE 
events.inputs + ADD COLUMN duration integer NULL, + ADD COLUMN hesitation integer NULL; + + + +ALTER TABLE public.projects + ALTER COLUMN gdpr SET DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "obscured" + }'::jsonb; + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 0b2945b39..308acbda1 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -253,7 +253,7 @@ $$ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, - "defaultInputMode": "plain" + "defaultInputMode": "obscured" }'::jsonb, first_recorded_session_at timestamp without time zone NULL DEFAULT NULL, sessions_last_check_at timestamp without time zone NULL DEFAULT NULL, diff --git a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql new file mode 100644 index 000000000..13813f5bc --- /dev/null +++ b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -0,0 +1,20 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.11.0' +$$ LANGUAGE sql IMMUTABLE; + +ALTER TABLE events.inputs + ADD COLUMN duration integer NULL, + ADD COLUMN hesitation integer NULL; + +ALTER TABLE public.projects + ALTER COLUMN gdpr SET DEFAULT '{ + "maskEmails": true, + "sampleRate": 33, + "maskNumbers": false, + "defaultInputMode": "obscured" + }'::jsonb; + +COMMIT; \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 57dea2a58..14b6550a2 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -190,7 +190,7 @@ $$ "maskEmails": true, "sampleRate": 33, "maskNumbers": false, - "defaultInputMode": 
"plain" + "defaultInputMode": "obscured" }'::jsonb, first_recorded_session_at timestamp without time zone NULL DEFAULT NULL, sessions_last_check_at timestamp without time zone NULL DEFAULT NULL, From 4d85adbc0c1b4afc6854a7aa1d51967499650e08 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 14:53:20 +0100 Subject: [PATCH 03/60] chore(actions): changed peers/peersEE actions --- .github/workflows/peers-ee.yaml | 28 ++++++++++++++++++++-------- .github/workflows/peers.yaml | 23 +++++++++++++++++------ 2 files changed, 37 insertions(+), 14 deletions(-) diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml index dcd003e93..b8d66a082 100644 --- a/.github/workflows/peers-ee.yaml +++ b/.github/workflows/peers-ee.yaml @@ -11,7 +11,7 @@ on: - "!peers/.gitignore" - "!peers/*-dev.sh" -name: Build and Deploy Peers +name: Build and Deploy Peers EE jobs: deploy: @@ -48,13 +48,25 @@ jobs: - name: Deploy to kubernetes run: | cd scripts/helmcharts/ - sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.EE_REGISTRY_URL }}\"#g" vars.yaml - sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\" #g" vars.yaml - sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.EE_MINIO_SECRET_KEY }}\" #g" vars.yaml - sed -i "s#domain_name.*#domain_name: \"ee.openreplay.com\" #g" vars.yaml - sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml - sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml - bash kube-install.sh --app peers + + ## Update secerts + sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.EE_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.EE_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ 
secrets.EE_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.EE_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.EE_DOMAIN_NAME }}\"/g" vars.yaml + sed -i "s/enterpriseEditionLicense: \"\"/enterpriseEditionLicense: \"${{ secrets.EE_LICENSE_KEY }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/peers/{n;n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} diff --git a/.github/workflows/peers.yaml b/.github/workflows/peers.yaml index 2de0ae3ed..65264d7b5 100644 --- a/.github/workflows/peers.yaml +++ b/.github/workflows/peers.yaml @@ -47,13 +47,24 @@ jobs: - name: Deploy to kubernetes run: | cd scripts/helmcharts/ + + ## Update secerts sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml - sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml - sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml - sed -i "s#domain_name.*#domain_name: \"foss.openreplay.com\" #g" vars.yaml - sed -i "s#kubeconfig.*#kubeconfig_path: ${KUBECONFIG}#g" vars.yaml - sed -i "s/image_tag:.*/image_tag: \"$IMAGE_TAG\"/g" vars.yaml - bash kube-install.sh --app peers + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.OSS_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: 
\"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.OSS_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/peers/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,peers,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,peers,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} From 35b938238d786097729f1a7ec5f54ddfa25fd1d7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 15:01:32 +0100 Subject: [PATCH 04/60] feat(assist): upgraded sub-dependencies feat(peers): upgraded sub-dependencies feat(sourcemaps-reader): upgraded sub-dependencies --- assist/package-lock.json | 12 ++-- ee/assist/package-lock.json | 34 ++++----- peers/package-lock.json | 109 +++++++++++++++++++++++++---- sourcemap-reader/package-lock.json | 6 +- 4 files changed, 123 insertions(+), 38 deletions(-) diff --git a/assist/package-lock.json b/assist/package-lock.json index aba9e43fe..683472320 100644 --- a/assist/package-lock.json +++ b/assist/package-lock.json @@ -45,9 +45,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", - "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" + "version": "18.14.6", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" }, "node_modules/accepts": { "version": "1.3.8", @@ -987,9 +987,9 @@ } }, "node_modules/ua-parser-js": { - "version": "1.0.33", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz", - "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==", + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz", + "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==", "funding": [ { "type": "opencollective", diff --git a/ee/assist/package-lock.json b/ee/assist/package-lock.json index 1d74677cf..967198a0e 100644 --- a/ee/assist/package-lock.json +++ b/ee/assist/package-lock.json @@ -38,9 +38,9 @@ } }, "node_modules/@redis/client": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.5.tgz", - "integrity": "sha512-fuMnpDYSjT5JXR9rrCW1YWA4L8N/9/uS4ImT3ZEC/hcaQRI1D/9FvwjriRj1UvepIgzZXthFVKMNRzP/LNL7BQ==", + "version": "1.5.6", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.5.6.tgz", + "integrity": "sha512-dFD1S6je+A47Lj22jN/upVU2fj4huR7S9APd7/ziUXsIXDL+11GPYti4Suv5y8FuXaN+0ZG4JF+y1houEJ7ToA==", "dependencies": { "cluster-key-slot": "1.1.2", "generic-pool": "3.9.0", @@ -67,9 +67,9 @@ } }, "node_modules/@redis/search": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.1.tgz", - "integrity": "sha512-pqCXTc5e7wJJgUuJiC3hBgfoFRoPxYzwn0BEfKgejTM7M/9zP3IpUcqcjgfp8hF+LoV8rHZzcNTz7V+pEIY7LQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.1.2.tgz", + "integrity": "sha512-/cMfstG/fOh/SsE+4/BQGeuH/JJloeWuH+qJzM8dbxuWvdWibWAOAHHCZTMPhV3xIlH4/cUEIA8OV5QnYpaVoA==", "peerDependencies": { 
"@redis/client": "^1.0.0" } @@ -117,9 +117,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz", - "integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==" + "version": "18.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" }, "node_modules/accepts": { "version": "1.3.8", @@ -878,15 +878,15 @@ } }, "node_modules/redis": { - "version": "4.6.4", - "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.4.tgz", - "integrity": "sha512-wi2tgDdQ+Q8q+PR5FLRx4QvDiWaA+PoJbrzsyFqlClN5R4LplHqN3scs/aGjE//mbz++W19SgxiEnQ27jnCRaA==", + "version": "4.6.5", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.6.5.tgz", + "integrity": "sha512-O0OWA36gDQbswOdUuAhRL6mTZpHFN525HlgZgDaVNgCJIAZR3ya06NTESb0R+TUZ+BFaDpz6NnnVvoMx9meUFg==", "dependencies": { "@redis/bloom": "1.2.0", - "@redis/client": "1.5.5", + "@redis/client": "1.5.6", "@redis/graph": "1.1.0", "@redis/json": "1.0.4", - "@redis/search": "1.1.1", + "@redis/search": "1.1.2", "@redis/time-series": "1.0.4" } }, @@ -1085,9 +1085,9 @@ } }, "node_modules/ua-parser-js": { - "version": "1.0.33", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.33.tgz", - "integrity": "sha512-RqshF7TPTE0XLYAqmjlu5cLLuGdKrNu9O1KLA/qp39QtbZwuzwv1dT46DZSopoUMsYgXpB3Cv8a03FI8b74oFQ==", + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.34.tgz", + "integrity": "sha512-K9mwJm/DaB6mRLZfw6q8IMXipcrmuT6yfhYmwhAkuh+81sChuYstYA+znlgaflUPaYUa3odxKPKGw6Vw/lANew==", "funding": [ { "type": "opencollective", diff --git a/peers/package-lock.json b/peers/package-lock.json index da9b72ca1..5811b59e3 100644 --- a/peers/package-lock.json +++ b/peers/package-lock.json @@ -57,9 +57,9 @@ "integrity": 
"sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "node_modules/@types/node": { - "version": "18.13.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", - "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + "version": "18.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", + "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" }, "node_modules/@types/qs": { "version": "6.9.7", @@ -72,9 +72,9 @@ "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" }, "node_modules/@types/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg==", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.1.tgz", + "integrity": "sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==", "dependencies": { "@types/mime": "*", "@types/node": "*" @@ -243,6 +243,14 @@ "node": ">= 0.10" } }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "engines": { + "node": ">= 12" + } + }, "node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -348,6 +356,28 @@ "node": ">= 0.10.0" } }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": 
"sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, "node_modules/finalhandler": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", @@ -365,6 +395,17 @@ "node": ">= 0.8" } }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -540,6 +581,41 @@ "node": ">= 0.6" } }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.0.tgz", + "integrity": "sha512-BKwRP/O0UvoMKp7GNdwPlObhYGB5DQqwhEDQlNKuoqwVYSxkSZCSbHjnFFmUEtwSKRPU4kNK8PbDYYitwaE3QA==", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": 
"^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -581,14 +657,15 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/peer": { - "version": "1.0.0-rc.9", - "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.9.tgz", - "integrity": "sha512-wjt3fWMKxM/lH/1uD5Qs9qinQ1x/aa9br1eZEQuJ2wuBBQrjAcCT85MUuY9PYcyoW5ymyABsDKC3H/q9KmZ3PA==", + "version": "1.0.0-rc.10", + "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.10.tgz", + "integrity": "sha512-S7uMqIAd1tTyvnkj4efdpn8EGc6BM1ONQvLg0vZkrnvA1cTisscBRsx+Jbor6DH68NRLnXgZbiY7/6FDER/GXw==", "dependencies": { "@types/express": "^4.17.3", "@types/ws": "^7.2.3 || ^8.0.0", "cors": "^2.8.5", "express": "^4.17.1", + "node-fetch": "^3.3.0", "ws": "^7.2.3 || ^8.0.0", "yargs": "^17.6.2" }, @@ -819,6 +896,14 @@ "node": ">= 0.8" } }, + "node_modules/web-streams-polyfill": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", + "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", + "engines": { + "node": ">= 8" + } + }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -864,9 +949,9 @@ } }, "node_modules/yargs": { - "version": "17.6.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", - "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "version": "17.7.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", + "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", 
"dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", diff --git a/sourcemap-reader/package-lock.json b/sourcemap-reader/package-lock.json index cbaebc3c1..e756a0649 100644 --- a/sourcemap-reader/package-lock.json +++ b/sourcemap-reader/package-lock.json @@ -43,9 +43,9 @@ } }, "node_modules/aws-sdk": { - "version": "2.1314.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1314.0.tgz", - "integrity": "sha512-2jsfvgtOQ6kRflaicn50ndME4YoIaBhlus/dZCExtWNXeu8ePh+eAtflsYs6aqIiRPKhCBLaqClzahWm7hC0XA==", + "version": "2.1329.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1329.0.tgz", + "integrity": "sha512-F5M9x/T+PanPiYGiL95atFE6QiwzJWwgPahaEgUdq+qvVAgruiNy5t6nw2B5tBB/yWDPPavHFip3UsXeO0qU3Q==", "dependencies": { "buffer": "4.9.2", "events": "1.1.1", From ec904892e7e74ef23cfd4fc533a634ec0a28c6fe Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 15:09:57 +0100 Subject: [PATCH 05/60] chore(actions): changed peers/peersEE actions --- .github/workflows/peers-ee.yaml | 67 +++++++++++++++++++++++++++++++-- .github/workflows/peers.yaml | 66 ++++++++++++++++++++++++++++++-- 2 files changed, 127 insertions(+), 6 deletions(-) diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml index b8d66a082..5981b4631 100644 --- a/.github/workflows/peers-ee.yaml +++ b/.github/workflows/peers-ee.yaml @@ -1,6 +1,11 @@ # This action will push the peers changes to aws on: workflow_dispatch: + inputs: + skip_security_checks: + description: 'Skip Security checks if there is a unfixable vuln or error. Value: true/false' + required: false + default: 'false' push: branches: - dev @@ -36,15 +41,60 @@ jobs: kubeconfig: ${{ secrets.EE_KUBECONFIG }} # Use content of kubeconfig in secret. id: setcontext - - name: Building and Pushing api image + # Caching docker images + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. 
+ continue-on-error: true + + + - name: Building and Pushing peers image id: build-image env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging run: | - cd peers - PUSH_IMAGE=1 bash build.sh ee + skip_security_checks=${{ github.event.inputs.skip_security_checks }} + cd api + PUSH_IMAGE=0 bash -x ./build.sh ee + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("peers") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("peers") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <> /tmp/image_override.yaml + ${image_array[0]}: + image: + # We've to strip off the -ee, as helm will append it. 
+ tag: `echo ${image_array[1]} | cut -d '-' -f 1` + EOF + done + - name: Deploy to kubernetes run: | cd scripts/helmcharts/ @@ -72,6 +122,17 @@ jobs: IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ee + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' + # - name: Debug Job # if: ${{ failure() }} # uses: mxschmitt/action-tmate@v3 diff --git a/.github/workflows/peers.yaml b/.github/workflows/peers.yaml index 65264d7b5..ef564ec65 100644 --- a/.github/workflows/peers.yaml +++ b/.github/workflows/peers.yaml @@ -1,6 +1,11 @@ # This action will push the peers changes to aws on: workflow_dispatch: + inputs: + skip_security_checks: + description: 'Skip Security checks if there is a unfixable vuln or error. Value: true/false' + required: false + default: 'false' push: branches: - dev @@ -35,15 +40,59 @@ jobs: kubeconfig: ${{ secrets.OSS_KUBECONFIG }} # Use content of kubeconfig in secret. id: setcontext - - name: Building and Pushing api image + # Caching docker images + - uses: satackey/action-docker-layer-caching@v0.0.11 + # Ignore the failure of a step and avoid terminating the job. 
+ continue-on-error: true + + + - name: Building and Pushing peers image id: build-image env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging run: | + skip_security_checks=${{ github.event.inputs.skip_security_checks }} cd peers - PUSH_IMAGE=1 bash build.sh + PUSH_IMAGE=0 bash -x ./build.sh + [[ "x$skip_security_checks" == "xtrue" ]] || { + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ + images=("peers") + for image in ${images[*]};do + ./trivy image --exit-code 1 --security-checks vuln --vuln-type os,library --severity "HIGH,CRITICAL" --ignore-unfixed $DOCKER_REPO/$image:$IMAGE_TAG + done + err_code=$? + [[ $err_code -ne 0 ]] && { + exit $err_code + } + } && { + echo "Skipping Security Checks" + } + images=("peers") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done + - name: Creating old image input + run: | + # + # Create yaml with existing image tags + # + kubectl get pods -n app -o jsonpath="{.items[*].spec.containers[*].image}" |\ + tr -s '[[:space:]]' '\n' | sort | uniq -c | grep '/foss/' | cut -d '/' -f3 > /tmp/image_tag.txt + + echo > /tmp/image_override.yaml + + for line in `cat /tmp/image_tag.txt`; + do + image_array=($(echo "$line" | tr ':' '\n')) + cat <> /tmp/image_override.yaml + ${image_array[0]}: + image: + tag: ${image_array[1]} + EOF + done + - name: Deploy to kubernetes run: | cd scripts/helmcharts/ @@ -70,6 +119,17 @@ jobs: IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} ENVIRONMENT: staging + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: foss + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' + # - name: Debug Job # if: 
${{ failure() }} # uses: mxschmitt/action-tmate@v3 @@ -77,4 +137,4 @@ jobs: # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} # IMAGE_TAG: ${{ github.sha }} # ENVIRONMENT: staging - # + From 071b2e77f48ba611e819bccb1394642fecf03f79 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 15:16:20 +0100 Subject: [PATCH 06/60] chore(actions): changed peers/peersEE actions --- .github/workflows/peers-ee.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/peers-ee.yaml b/.github/workflows/peers-ee.yaml index 5981b4631..564c5cf6d 100644 --- a/.github/workflows/peers-ee.yaml +++ b/.github/workflows/peers-ee.yaml @@ -55,7 +55,7 @@ jobs: ENVIRONMENT: staging run: | skip_security_checks=${{ github.event.inputs.skip_security_checks }} - cd api + cd peers PUSH_IMAGE=0 bash -x ./build.sh ee [[ "x$skip_security_checks" == "xtrue" ]] || { curl -L https://github.com/aquasecurity/trivy/releases/download/v0.34.0/trivy_0.34.0_Linux-64bit.tar.gz | tar -xzf - -C ./ From 1d06e651eab1a4df4f352ff4053bdb69e6720598 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 16:10:08 +0100 Subject: [PATCH 07/60] feat(chalice): health-check test --- api/app.py | 5 +- api/chalicelib/core/health.py | 148 +++++++++++++++ api/routers/subs/health.py | 15 ++ ee/api/.gitignore | 1 + ee/api/app.py | 5 +- ee/api/chalicelib/core/health.py | 173 ++++++++++++++++++ ee/api/clean-dev.sh | 1 + .../db/init_dbs/clickhouse/1.11.0/1.11.0.sql | 1 + .../clickhouse/create/init_schema.sql | 1 + 9 files changed, 348 insertions(+), 2 deletions(-) create mode 100644 api/chalicelib/core/health.py create mode 100644 api/routers/subs/health.py create mode 100644 ee/api/chalicelib/core/health.py create mode 100644 ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql diff --git a/api/app.py b/api/app.py index 43c3b7945..883cf6704 100644 --- a/api/app.py +++ b/api/app.py @@ -12,7 +12,7 @@ from chalicelib.utils import pg_client from routers import core, 
core_dynamic from routers.crons import core_crons from routers.crons import core_dynamic_crons -from routers.subs import insights, metrics, v1_api +from routers.subs import insights, metrics, v1_api, health app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -51,6 +51,9 @@ app.include_router(core_dynamic.app_apikey) app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) +app.include_router(health.public_app) +app.include_router(health.app) +app.include_router(health.app_apikey) loglevel = config("LOGLEVEL", default=logging.INFO) print(f">Loglevel set to: {loglevel}") diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py new file mode 100644 index 000000000..5516d7e4e --- /dev/null +++ b/api/chalicelib/core/health.py @@ -0,0 +1,148 @@ +import requests +from decouple import config + +from chalicelib.utils import pg_client + +if config("LOCAL_DEV", cast=bool, default=False): + HEALTH_ENDPOINTS = { + "alerts": "http://127.0.0.1:8888/metrics", + "assets": "http://127.0.0.1:8888/metrics", + "assist": "http://127.0.0.1:8888/metrics", + "chalice": "http://127.0.0.1:8888/metrics", + "db": "http://127.0.0.1:8888/metrics", + "ender": "http://127.0.0.1:8888/metrics", + "frontend": "http://127.0.0.1:8888/metrics", + "heuristics": "http://127.0.0.1:8888/metrics", + "http": "http://127.0.0.1:8888/metrics", + "ingress-nginx": "http://127.0.0.1:8888/metrics", + "integrations": "http://127.0.0.1:8888/metrics", + "peers": "http://127.0.0.1:8888/metrics", + "quickwit": "http://127.0.0.1:8888/metrics", + "sink": "http://127.0.0.1:8888/metrics", + "sourcemapreader": "http://127.0.0.1:8888/metrics", + "storage": "http://127.0.0.1:8888/metrics", + "utilities": "http://127.0.0.1:8888/metrics" + } + +else: + HEALTH_ENDPOINTS = { + "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", + 
"assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/metrics", + "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", + "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", + "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", + "frontend": "http://frontend-openreplay.app.svc.cluster.local:8888/metrics", + "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", + "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", + "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", + "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/metrics", + "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", + "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/metrics", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", + "utilities": "http://utilities-openreplay.app.svc.cluster.local:8888/metrics", + } + + +def __check_database_pg(): + with pg_client.PostgresClient() as cur: + cur.execute("SHOW server_version;") + server_version = cur.fetchone() + cur.execute("SELECT openreplay_version() AS version;") + schema_version = cur.fetchone() + return { + "health": True, + "details": { + "version": server_version["server_version"], + "schema": schema_version["version"] + } + } + + +def __not_supported(): + return {"errors": ["not supported"]} + + +def check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! 
issue with the storage-health code:{results.status_code}") + print(results.text) + fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!! Timeout getting {service_name}-health") + fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print("!! Issue getting storage-health response") + print(str(e)) + print("expected JSON, received:") + try: + print(results.text) + fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def get_health(): + health_map = { + "databases": { + "postgres": __check_database_pg + }, + "ingestionPipeline": { + "redis": __not_supported + }, + "backendServices": { + "alerts": check_be_service("alerts"), + "assets": check_be_service("assets"), + "assist": check_be_service("assist"), + "chalice": check_be_service("chalice"), + "db": check_be_service("db"), + "ender": check_be_service("ender"), + "frontend": check_be_service("frontend"), + "heuristics": check_be_service("heuristics"), + "http": check_be_service("http"), + "ingress-nginx": check_be_service("ingress-nginx"), + "integrations": check_be_service("integrations"), + "peers": check_be_service("peers"), + "quickwit": check_be_service("quickwit"), + "sink": check_be_service("sink"), + "sourcemapreader": check_be_service("sourcemapreader"), + "storage": check_be_service("storage"), + "utilities": check_be_service("utilities") + }, + # "overall": { + # "health": "na", + # "details": { + # "numberOfEventCaptured": "int", + # "numberOfSessionsCaptured": "int" + # }, + # "labels": { + # "parent": "information" + # } + # }, + # "ssl": True + } + for parent_key in health_map.keys(): + for element_key in health_map[parent_key]: + health_map[parent_key][element_key] = 
health_map[parent_key][element_key]() + return health_map diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py new file mode 100644 index 000000000..6655f2a20 --- /dev/null +++ b/api/routers/subs/health.py @@ -0,0 +1,15 @@ +from typing import Union + +from fastapi import Body, Depends, Request + +import schemas +from chalicelib.core import health +from or_dependencies import OR_context +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@public_app.get('/health', tags=["dashboard"]) +def get_global_health(): + return {"data": health.get_health()} diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 79aec2ade..9a9636ee1 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -264,5 +264,6 @@ Pipfile.lock /app_alerts.py /build_alerts.sh /build_crons.sh +/routers/subs/health.py /routers/subs/v1_api.py #exp /chalicelib/core/dashboards.py diff --git a/ee/api/app.py b/ee/api/app.py index a1e203005..407e4aa5b 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -18,7 +18,7 @@ from routers.crons import core_crons from routers.crons import core_dynamic_crons from routers.crons import ee_crons from routers.subs import insights, metrics, v1_api_ee -from routers.subs import v1_api +from routers.subs import v1_api, health app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -68,6 +68,9 @@ app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) app.include_router(v1_api_ee.app_apikey) +app.include_router(health.public_app) +app.include_router(health.app) +app.include_router(health.app_apikey) loglevel = config("LOGLEVEL", default=logging.INFO) print(f">Loglevel set to: {loglevel}") diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py new file mode 100644 index 000000000..4c27ffe95 --- /dev/null +++ 
b/ee/api/chalicelib/core/health.py @@ -0,0 +1,173 @@ +import requests +from decouple import config + +from chalicelib.utils import pg_client, ch_client + +if config("LOCAL_DEV", cast=bool, default=False): + HEALTH_ENDPOINTS = { + "alerts": "http://127.0.0.1:8888/metrics", + "assets": "http://127.0.0.1:8888/metrics", + "assist": "http://127.0.0.1:8888/metrics", + "chalice": "http://127.0.0.1:8888/metrics", + "db": "http://127.0.0.1:8888/metrics", + "ender": "http://127.0.0.1:8888/metrics", + "frontend": "http://127.0.0.1:8888/metrics", + "heuristics": "http://127.0.0.1:8888/metrics", + "http": "http://127.0.0.1:8888/metrics", + "ingress-nginx": "http://127.0.0.1:8888/metrics", + "integrations": "http://127.0.0.1:8888/metrics", + "peers": "http://127.0.0.1:8888/metrics", + "quickwit": "http://127.0.0.1:8888/metrics", + "sink": "http://127.0.0.1:8888/metrics", + "sourcemapreader": "http://127.0.0.1:8888/metrics", + "storage": "http://127.0.0.1:8888/metrics", + "utilities": "http://127.0.0.1:8888/metrics" + } + +else: + HEALTH_ENDPOINTS = { + "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", + "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/metrics", + "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", + "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", + "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", + "frontend": "http://frontend-openreplay.app.svc.cluster.local:8888/metrics", + "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", + "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", + "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", + "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/metrics", + "quickwit": 
"http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", + "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/metrics", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", + "utilities": "http://utilities-openreplay.app.svc.cluster.local:8888/metrics", + } + + +def __check_database_pg(): + with pg_client.PostgresClient() as cur: + cur.execute("SHOW server_version;") + server_version = cur.fetchone() + cur.execute("SELECT openreplay_version() AS version;") + schema_version = cur.fetchone() + return { + "health": True, + "details": { + "version": server_version["server_version"], + "schema": schema_version["version"] + } + } + + +def __check_database_ch(): + errors = {} + with ch_client.ClickHouseClient() as ch: + server_version = ch.execute("SELECT version() AS server_version;") + schema_version = ch.execute("""SELECT 1 + FROM system.functions + WHERE name = 'openreplay_version';""") + if len(schema_version) > 0: + schema_version = ch.execute("SELECT openreplay_version()() AS version;") + schema_version = schema_version[0]["version"] + else: + schema_version = "unknown" + errors = {"errors": ["clickhouse schema is outdated"]} + return { + "health": True, + "details": { + "version": server_version[0]["server_version"], + "schema": schema_version, + **errors + } + } + + +def __not_supported(): + return {"errors": ["not supported"]} + + +def check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! issue with the storage-health code:{results.status_code}") + print(results.text) + fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!! 
Timeout getting {service_name}-health") + fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print("!! Issue getting storage-health response") + print(str(e)) + print("expected JSON, received:") + try: + print(results.text) + fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def get_health(): + health_map = { + "databases": { + "postgres": __check_database_pg, + "clickhouse": __check_database_ch + }, + "ingestionPipeline": { + "redis": __not_supported, + "kafka": __not_supported + }, + "backendServices": { + "alerts": check_be_service("alerts"), + "assets": check_be_service("assets"), + "assist": check_be_service("assist"), + "chalice": check_be_service("chalice"), + "db": check_be_service("db"), + "ender": check_be_service("ender"), + "frontend": check_be_service("frontend"), + "heuristics": check_be_service("heuristics"), + "http": check_be_service("http"), + "ingress-nginx": check_be_service("ingress-nginx"), + "integrations": check_be_service("integrations"), + "peers": check_be_service("peers"), + "quickwit": check_be_service("quickwit"), + "sink": check_be_service("sink"), + "sourcemapreader": check_be_service("sourcemapreader"), + "storage": check_be_service("storage"), + "utilities": check_be_service("utilities") + }, + # "overall": { + # "health": "na", + # "details": { + # "numberOfEventCaptured": "int", + # "numberOfSessionsCaptured": "int" + # }, + # "labels": { + # "parent": "information" + # } + # }, + # "ssl": True + } + for parent_key in health_map.keys(): + for element_key in health_map[parent_key]: + health_map[parent_key][element_key] = health_map[parent_key][element_key]() + return health_map diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index acc91e7b7..9241b8e48 100755 --- a/ee/api/clean-dev.sh 
+++ b/ee/api/clean-dev.sh @@ -78,6 +78,7 @@ rm -rf ./Dockerfile_bundle rm -rf ./entrypoint.bundle.sh rm -rf ./chalicelib/core/heatmaps.py rm -rf ./schemas.py +rm -rf ./routers/subs/health.py rm -rf ./routers/subs/v1_api.py #exp rm -rf ./chalicelib/core/custom_metrics.py rm -rf ./chalicelib/core/performance_event.py diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql new file mode 100644 index 000000000..5e9c11242 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql @@ -0,0 +1 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 9b2cfbbd1..22d2e804e 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -1,3 +1,4 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; CREATE DATABASE IF NOT EXISTS experimental; CREATE TABLE IF NOT EXISTS experimental.autocomplete From 86af6f37c51dff97aefd8225e16c5a72808b623d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 18:12:36 +0100 Subject: [PATCH 08/60] feat(chalice): health-check test --- api/chalicelib/core/health.py | 85 +++++++++--- api/env.default | 2 +- api/requirements.txt | 2 + ee/api/chalicelib/core/health.py | 223 +++++++++++++++++++------------ ee/api/env.default | 5 +- ee/api/requirements.txt | 3 + 6 files changed, 211 insertions(+), 109 deletions(-) diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 5516d7e4e..95b4abdb9 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -1,3 +1,6 @@ +from urllib.parse import urlparse + +import redis import requests from decouple import config @@ -65,7 +68,21 @@ def __not_supported(): return 
{"errors": ["not supported"]} -def check_be_service(service_name): +def __always_healthy(): + return { + "health": True, + "details": {} + } + + +def __always_healthy_with_version(): + return { + "health": True, + "details": {"version": config("version_number", default="unknown")} + } + + +def __check_be_service(service_name): def fn(): fail_response = { "health": False, @@ -87,7 +104,6 @@ def check_be_service(service_name): except Exception as e: print("!! Issue getting storage-health response") print(str(e)) - print("expected JSON, received:") try: print(results.text) fail_response["details"]["errors"].append(results.text) @@ -103,32 +119,61 @@ def check_be_service(service_name): return fn +def __check_redis(): + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("REDIS_STRING", default=None) is None: + fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars") + return fail_response + + try: + u = urlparse(config("REDIS_STRING")) + r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) + r.ping() + except Exception as e: + print("!! 
Issue getting assist-health response") + print(str(e)) + fail_response["details"]["errors"].append(str(e)) + return fail_response + + return { + "health": True, + "details": {"version": r.execute_command('INFO')['redis_version']} + } + + +def __check_assist(): + pass + + def get_health(): health_map = { "databases": { "postgres": __check_database_pg }, "ingestionPipeline": { - "redis": __not_supported + "redis": __check_redis }, "backendServices": { - "alerts": check_be_service("alerts"), - "assets": check_be_service("assets"), - "assist": check_be_service("assist"), - "chalice": check_be_service("chalice"), - "db": check_be_service("db"), - "ender": check_be_service("ender"), - "frontend": check_be_service("frontend"), - "heuristics": check_be_service("heuristics"), - "http": check_be_service("http"), - "ingress-nginx": check_be_service("ingress-nginx"), - "integrations": check_be_service("integrations"), - "peers": check_be_service("peers"), - "quickwit": check_be_service("quickwit"), - "sink": check_be_service("sink"), - "sourcemapreader": check_be_service("sourcemapreader"), - "storage": check_be_service("storage"), - "utilities": check_be_service("utilities") + "alerts": __check_be_service("alerts"), + "assets": __check_be_service("assets"), + "assist": __check_assist, + "chalice": __always_healthy_with_version, + "db": __check_be_service("db"), + "ender": __check_be_service("ender"), + "frontend": __check_be_service("frontend"), + "heuristics": __check_be_service("heuristics"), + "http": __check_be_service("http"), + "ingress-nginx": __always_healthy, + "integrations": __check_be_service("integrations"), + "peers": __check_be_service("peers"), + "quickwit": __check_be_service("quickwit"), + "sink": __check_be_service("sink"), + "sourcemapreader": __check_be_service("sourcemapreader"), + "storage": __check_be_service("storage"), + "utilities": __check_be_service("utilities") }, # "overall": { # "health": "na", diff --git a/api/env.default b/api/env.default 
index 78acd001c..12feccf1f 100644 --- a/api/env.default +++ b/api/env.default @@ -52,4 +52,4 @@ PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= PYTHONUNBUFFERED=1 -THUMBNAILS_BUCKET=thumbnails \ No newline at end of file +REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379 \ No newline at end of file diff --git a/api/requirements.txt b/api/requirements.txt index 0a058a94f..4a8d35090 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -13,3 +13,5 @@ uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 apscheduler==3.10.0 + +redis==4.5.1 \ No newline at end of file diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 4c27ffe95..4de9844f0 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -1,3 +1,6 @@ +from urllib.parse import urlparse + +import redis import requests from decouple import config @@ -61,6 +64,137 @@ def __check_database_pg(): } +def __not_supported(): + return {"errors": ["not supported"]} + + +def __always_healthy(): + return { + "health": True, + "details": {} + } + + +def __always_healthy_with_version(): + return { + "health": True, + "details": {"version": config("version_number", default="unknown")} + } + + +def __check_be_service(service_name): + def fn(): + fail_response = { + "health": False, + "details": { + "errors": ["server health-check failed"] + } + } + try: + results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) + if results.status_code != 200: + print(f"!! issue with the storage-health code:{results.status_code}") + print(results.text) + fail_response["details"]["errors"].append(results.text) + return fail_response + except requests.exceptions.Timeout: + print(f"!! Timeout getting {service_name}-health") + fail_response["details"]["errors"].append("timeout") + return fail_response + except Exception as e: + print("!! 
Issue getting storage-health response") + print(str(e)) + try: + print(results.text) + fail_response["details"]["errors"].append(results.text) + except: + print("couldn't get response") + fail_response["details"]["errors"].append(str(e)) + return fail_response + return { + "health": True, + "details": {} + } + + return fn + + +def __check_redis(): + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("REDIS_STRING", default=None) is None: + fail_response["details"]["errors"].append("REDIS_STRING not defined in env-vars") + return fail_response + + try: + u = urlparse(config("REDIS_STRING")) + r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) + r.ping() + except Exception as e: + print("!! Issue getting assist-health response") + print(str(e)) + fail_response["details"]["errors"].append(str(e)) + return fail_response + + return { + "health": True, + "details": {"version": r.execute_command('INFO')['redis_version']} + } + + +def __check_assist(): + pass + + +def get_health(): + health_map = { + "databases": { + "postgres": __check_database_pg, + "clickhouse": __check_database_ch + }, + "ingestionPipeline": { + "redis": __check_redis, + "kafka": __not_supported + }, + "backendServices": { + "alerts": __check_be_service("alerts"), + "assets": __check_be_service("assets"), + "assist": __check_assist, + "chalice": __always_healthy_with_version, + "db": __check_be_service("db"), + "ender": __check_be_service("ender"), + "frontend": __check_be_service("frontend"), + "heuristics": __check_be_service("heuristics"), + "http": __check_be_service("http"), + "ingress-nginx": __always_healthy, + "integrations": __check_be_service("integrations"), + "peers": __check_be_service("peers"), + "quickwit": __check_be_service("quickwit"), + "sink": __check_be_service("sink"), + "sourcemapreader": __check_be_service("sourcemapreader"), + "storage": __check_be_service("storage"), + "utilities": 
__check_be_service("utilities") + }, + # "overall": { + # "health": "na", + # "details": { + # "numberOfEventCaptured": "int", + # "numberOfSessionsCaptured": "int" + # }, + # "labels": { + # "parent": "information" + # } + # }, + # "ssl": True + } + for parent_key in health_map.keys(): + for element_key in health_map[parent_key]: + health_map[parent_key][element_key] = health_map[parent_key][element_key]() + return health_map + + def __check_database_ch(): errors = {} with ch_client.ClickHouseClient() as ch: @@ -84,90 +218,5 @@ def __check_database_ch(): } -def __not_supported(): - return {"errors": ["not supported"]} - - -def check_be_service(service_name): - def fn(): - fail_response = { - "health": False, - "details": { - "errors": ["server health-check failed"] - } - } - try: - results = requests.get(HEALTH_ENDPOINTS.get(service_name), timeout=2) - if results.status_code != 200: - print(f"!! issue with the storage-health code:{results.status_code}") - print(results.text) - fail_response["details"]["errors"].append(results.text) - return fail_response - except requests.exceptions.Timeout: - print(f"!! Timeout getting {service_name}-health") - fail_response["details"]["errors"].append("timeout") - return fail_response - except Exception as e: - print("!! 
Issue getting storage-health response") - print(str(e)) - print("expected JSON, received:") - try: - print(results.text) - fail_response["details"]["errors"].append(results.text) - except: - print("couldn't get response") - fail_response["details"]["errors"].append(str(e)) - return fail_response - return { - "health": True, - "details": {} - } - - return fn - - -def get_health(): - health_map = { - "databases": { - "postgres": __check_database_pg, - "clickhouse": __check_database_ch - }, - "ingestionPipeline": { - "redis": __not_supported, - "kafka": __not_supported - }, - "backendServices": { - "alerts": check_be_service("alerts"), - "assets": check_be_service("assets"), - "assist": check_be_service("assist"), - "chalice": check_be_service("chalice"), - "db": check_be_service("db"), - "ender": check_be_service("ender"), - "frontend": check_be_service("frontend"), - "heuristics": check_be_service("heuristics"), - "http": check_be_service("http"), - "ingress-nginx": check_be_service("ingress-nginx"), - "integrations": check_be_service("integrations"), - "peers": check_be_service("peers"), - "quickwit": check_be_service("quickwit"), - "sink": check_be_service("sink"), - "sourcemapreader": check_be_service("sourcemapreader"), - "storage": check_be_service("storage"), - "utilities": check_be_service("utilities") - }, - # "overall": { - # "health": "na", - # "details": { - # "numberOfEventCaptured": "int", - # "numberOfSessionsCaptured": "int" - # }, - # "labels": { - # "parent": "information" - # } - # }, - # "ssl": True - } - for parent_key in health_map.keys(): - for element_key in health_map[parent_key]: - health_map[parent_key][element_key] = health_map[parent_key][element_key]() - return health_map +def __check_kafka(): + pass diff --git a/ee/api/env.default b/ee/api/env.default index cdbc3d256..df353d071 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -70,4 +70,7 @@ SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe 
DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 -ASSIST_JWT_SECRET= \ No newline at end of file +ASSIST_JWT_SECRET= +REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379 +KAFKA_SERVERS=kafka.db.svc.cluster.local:9092 +KAFKA_USE_SSL=false \ No newline at end of file diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index c8b76e700..3d97c63e6 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -17,3 +17,6 @@ apscheduler==3.10.0 clickhouse-driver==0.2.5 python3-saml==1.15.0 python-multipart==0.0.5 + +redis==4.5.1 +kafka-python==2.0.2 \ No newline at end of file From 687dfc1857779d618825a39358568131a1890c5a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 18:16:10 +0100 Subject: [PATCH 09/60] chore(actions): ignore chalice vulnerability check --- .github/workflows/api-ee.yaml | 2 +- .github/workflows/api.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index f9a1730f1..8feec182c 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -5,7 +5,7 @@ on: skip_security_checks: description: 'Skip Security checks if there is a unfixable vuln or error. Value: true/false' required: false - default: 'false' + default: 'true' push: branches: - dev diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 8e2f7fa7b..9537d97b6 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -5,7 +5,7 @@ on: skip_security_checks: description: 'Skip Security checks if there is a unfixable vuln or error. 
Value: true/false' required: false - default: 'false' + default: 'true' push: branches: - dev From d86b71f66e68f083a25279a2e7ed7f892332838d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 7 Mar 2023 18:36:48 +0100 Subject: [PATCH 10/60] feat(chalice): health-check test --- api/chalicelib/core/health.py | 2 +- ee/api/chalicelib/core/health.py | 32 +++++++++++++++++++++++++++++--- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 95b4abdb9..071cf7f9c 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -133,7 +133,7 @@ def __check_redis(): r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) r.ping() except Exception as e: - print("!! Issue getting assist-health response") + print("!! Issue getting redis-health response") print(str(e)) fail_response["details"]["errors"].append(str(e)) return fail_response diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 4de9844f0..6139992a0 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -3,6 +3,7 @@ from urllib.parse import urlparse import redis import requests from decouple import config +import kafka from chalicelib.utils import pg_client, ch_client @@ -133,7 +134,7 @@ def __check_redis(): r = redis.Redis(host=u.hostname, port=u.port, socket_timeout=2) r.ping() except Exception as e: - print("!! Issue getting assist-health response") + print("!! 
Issue getting redis-health response") print(str(e)) fail_response["details"]["errors"].append(str(e)) return fail_response @@ -156,7 +157,7 @@ def get_health(): }, "ingestionPipeline": { "redis": __check_redis, - "kafka": __not_supported + "kafka": __check_kafka }, "backendServices": { "alerts": __check_be_service("alerts"), @@ -219,4 +220,29 @@ def __check_database_ch(): def __check_kafka(): - pass + fail_response = { + "health": False, + "details": {"errors": ["server health-check failed"]} + } + if config("KAFKA_SERVERS", default=None) is None: + fail_response["details"]["errors"].append("KAFKA_SERVERS not defined in env-vars") + return fail_response + + try: + # consumer = kafka.KafkaConsumer(group_id='test', bootstrap_servers=[config("KAFKA_SERVERS")]) + # topics = consumer.topics() + # + # if not topics: + # raise RuntimeError() + client =kafka.KafkaClient(bootstrap_servers=[config("KAFKA_SERVERS")]) + except Exception as e: + print("!! Issue getting kafka-health response") + print(str(e)) + fail_response["details"]["errors"].append(str(e)) + return fail_response + + return { + "health": True, + "details": {"version": r.execute_command('INFO')['redis_version']} + } + From dbc031f4a8737971457555d86b47fe9f735b1edb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 8 Mar 2023 11:47:58 +0100 Subject: [PATCH 11/60] chore(actions): changed actions --- .github/workflows/api-ee.yaml | 2 +- .github/workflows/api.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index 8feec182c..f9a1730f1 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -5,7 +5,7 @@ on: skip_security_checks: description: 'Skip Security checks if there is a unfixable vuln or error. 
Value: true/false' required: false - default: 'true' + default: 'false' push: branches: - dev diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index 9537d97b6..8e2f7fa7b 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -5,7 +5,7 @@ on: skip_security_checks: description: 'Skip Security checks if there is a unfixable vuln or error. Value: true/false' required: false - default: 'true' + default: 'false' push: branches: - dev From d95e7c62118264fa41733b09879543986cabc682 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Mar 2023 14:58:32 +0100 Subject: [PATCH 12/60] feat(assist): allow maxHttpBufferSize in bytes --- assist/servers/websocket.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/assist/servers/websocket.js b/assist/servers/websocket.js index f5d029bc2..0258960d0 100644 --- a/assist/servers/websocket.js +++ b/assist/servers/websocket.js @@ -26,7 +26,8 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + // maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: parseInt(process.env.maxHttpBufferSizeBytes), cors: { origin: "*", methods: ["GET", "POST", "PUT"] From f8311b8b381dc0dee9a7ccb6d82ff96b601d298c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Mar 2023 15:09:31 +0100 Subject: [PATCH 13/60] chore(actions): changed actions --- .github/workflows/assist-ee.yaml | 4 ++-- .github/workflows/assist.yaml | 4 ++-- .github/workflows/sourcemaps-reader.yaml | 8 ++++---- assist/Dockerfile | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/assist-ee.yaml b/.github/workflows/assist-ee.yaml index f2fa32ba2..44fcb5650 100644 --- a/.github/workflows/assist-ee.yaml +++ b/.github/workflows/assist-ee.yaml @@ -101,9 +101,9 @@ jobs: cat /tmp/image_override.yaml # 
Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,assist,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} diff --git a/.github/workflows/assist.yaml b/.github/workflows/assist.yaml index 67bfed543..37582d7d0 100644 --- a/.github/workflows/assist.yaml +++ b/.github/workflows/assist.yaml @@ -100,9 +100,9 @@ jobs: cat /tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,assist,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,assist,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} diff --git a/.github/workflows/sourcemaps-reader.yaml b/.github/workflows/sourcemaps-reader.yaml index f0059da40..5b7c11d01 100644 --- a/.github/workflows/sourcemaps-reader.yaml +++ b/.github/workflows/sourcemaps-reader.yaml @@ -1,4 +1,4 @@ -# This action will push the chalice changes to aws +# This action will push the sourcemapreader changes to aws on: workflow_dispatch: push: @@ -83,13 +83,13 @@ jobs: sed -i "s/domainName: \"\"/domainName: \"${{ secrets.OSS_DOMAIN_NAME }}\"/g" vars.yaml # Update changed image tag - sed -i "/chalice/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + sed -i "/sourcemapreader/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml cat 
/tmp/image_override.yaml # Deploy command - mv openreplay/charts/{ingress-nginx,chalice,quickwit} /tmp + mv openreplay/charts/{ingress-nginx,sourcemapreader,quickwit} /tmp rm -rf openreplay/charts/* - mv /tmp/{ingress-nginx,chalice,quickwit} openreplay/charts/ + mv /tmp/{ingress-nginx,sourcemapreader,quickwit} openreplay/charts/ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} diff --git a/assist/Dockerfile b/assist/Dockerfile index edbaae03c..84b54c906 100644 --- a/assist/Dockerfile +++ b/assist/Dockerfile @@ -18,4 +18,4 @@ USER 1001 ADD --chown=1001 https://static.openreplay.com/geoip/GeoLite2-Country.mmdb $MAXMINDDB_FILE ENTRYPOINT ["/sbin/tini", "--"] -CMD npm start +CMD npm start \ No newline at end of file From 1d7a5446cc7d042c08b0234d05132e643c906242 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Thu, 9 Mar 2023 15:43:29 +0100 Subject: [PATCH 14/60] feat(assist): accept maxHttpBufferSize as a float --- assist/servers/websocket.js | 3 +-- ee/assist/servers/websocket-cluster.js | 4 ++-- ee/assist/servers/websocket.js | 4 ++-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/assist/servers/websocket.js b/assist/servers/websocket.js index 0258960d0..4c4a657bb 100644 --- a/assist/servers/websocket.js +++ b/assist/servers/websocket.js @@ -26,8 +26,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { io = _io(server, { - // maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, - maxHttpBufferSize: parseInt(process.env.maxHttpBufferSizeBytes), + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] diff --git a/ee/assist/servers/websocket-cluster.js b/ee/assist/servers/websocket-cluster.js index e129bfcb6..a1f389685 100644 
--- a/ee/assist/servers/websocket-cluster.js +++ b/ee/assist/servers/websocket-cluster.js @@ -34,7 +34,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -43,7 +43,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] diff --git a/ee/assist/servers/websocket.js b/ee/assist/servers/websocket.js index c906b5987..330361df3 100644 --- a/ee/assist/servers/websocket.js +++ b/ee/assist/servers/websocket.js @@ -29,7 +29,7 @@ const debug = process.env.debug === "1"; const createSocketIOServer = function (server, prefix) { if (process.env.uws !== "true") { io = _io(server, { - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] @@ -38,7 +38,7 @@ const createSocketIOServer = function (server, prefix) { }); } else { io = new _io.Server({ - maxHttpBufferSize: (parseInt(process.env.maxHttpBufferSize) || 5) * 1e6, + maxHttpBufferSize: (parseFloat(process.env.maxHttpBufferSize) || 5) * 1e6, cors: { origin: "*", methods: ["GET", "POST", "PUT"] From c1ec53c39eb82b7e5fac547bbd9ea184e0b06fce Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 10 Mar 2023 14:48:06 +0100 Subject: [PATCH 15/60] feat(chalice): health-check for kafka --- ee/api/chalicelib/core/health.py | 16 +++++++--------- ee/api/requirements.txt | 2 +- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git 
a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 6139992a0..784dfceab 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -2,8 +2,8 @@ from urllib.parse import urlparse import redis import requests +from confluent_kafka.admin import AdminClient from decouple import config -import kafka from chalicelib.utils import pg_client, ch_client @@ -229,12 +229,11 @@ def __check_kafka(): return fail_response try: - # consumer = kafka.KafkaConsumer(group_id='test', bootstrap_servers=[config("KAFKA_SERVERS")]) - # topics = consumer.topics() - # - # if not topics: - # raise RuntimeError() - client =kafka.KafkaClient(bootstrap_servers=[config("KAFKA_SERVERS")]) + a = AdminClient({'bootstrap.servers': config("KAFKA_SERVERS"), "socket.connection.setup.timeout.ms": 3000}) + topics = a.list_topics().topics + if not topics: + raise Exception('topics not found') + except Exception as e: print("!! Issue getting kafka-health response") print(str(e)) @@ -243,6 +242,5 @@ def __check_kafka(): return { "health": True, - "details": {"version": r.execute_command('INFO')['redis_version']} + "details": {} } - diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 3d97c63e6..9ce06fe06 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -19,4 +19,4 @@ python3-saml==1.15.0 python-multipart==0.0.5 redis==4.5.1 -kafka-python==2.0.2 \ No newline at end of file +confluent-kafka==2.0.2 \ No newline at end of file From c07474bdfafe5c35baa01d3dc446ae1ad32d8c7b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Mar 2023 17:02:11 +0100 Subject: [PATCH 16/60] feat(chalice): health-check feat(assist): health-check feat(peers): health-check feat(sourcemaps-reader): health-check --- api/chalicelib/core/health.py | 10 ++- assist/package.json | 2 +- assist/server.js | 16 ++--- assist/servers/websocket.js | 18 +++++- assist/utils/health.js | 54 ++++++++++++++++ ee/api/chalicelib/core/health.py | 12 ++-- 
ee/assist/package-lock.json | 10 +-- ee/assist/package.json | 2 +- ee/assist/server.js | 21 ++----- ee/assist/servers/websocket-cluster.js | 17 ++++++ ee/assist/servers/websocket.js | 18 ++++++ ee/assist/utils/health.js | 61 +++++++++++++++++++ peers/clean-dev.sh | 3 + peers/package-lock.json | 28 ++++----- peers/package.json | 2 +- peers/prepare-dev.sh | 3 + peers/run-dev.sh | 6 ++ peers/server.js | 9 +-- .../db/init_dbs/postgresql/1.11.0/1.11.0.sql | 5 ++ sourcemap-reader/.gitignore | 5 +- sourcemap-reader/clean-dev.sh | 5 +- sourcemap-reader/package-lock.json | 10 +-- sourcemap-reader/package.json | 2 +- sourcemap-reader/server.js | 14 ++--- sourcemap-reader/utils/health.js | 52 ++++++++++++++++ 25 files changed, 296 insertions(+), 89 deletions(-) create mode 100644 assist/utils/health.js create mode 100644 ee/assist/utils/health.js create mode 100755 peers/clean-dev.sh create mode 100755 peers/prepare-dev.sh create mode 100755 peers/run-dev.sh create mode 100644 sourcemap-reader/utils/health.js diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 071cf7f9c..0a73661b7 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -31,7 +31,7 @@ else: HEALTH_ENDPOINTS = { "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", - "assist": "http://assist-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", @@ -40,12 +40,11 @@ else: "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", "integrations": 
"http://integrations-openreplay.app.svc.cluster.local:8888/metrics", - "peers": "http://peers-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health", "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", - "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", - "utilities": "http://utilities-openreplay.app.svc.cluster.local:8888/metrics", } @@ -172,8 +171,7 @@ def get_health(): "quickwit": __check_be_service("quickwit"), "sink": __check_be_service("sink"), "sourcemapreader": __check_be_service("sourcemapreader"), - "storage": __check_be_service("storage"), - "utilities": __check_be_service("utilities") + "storage": __check_be_service("storage") }, # "overall": { # "health": "na", diff --git a/assist/package.json b/assist/package.json index b06c8cae5..ad9794fea 100644 --- a/assist/package.json +++ b/assist/package.json @@ -1,6 +1,6 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/assist/server.js b/assist/server.js index d71aca65d..5eb6c2e16 100644 --- a/assist/server.js +++ b/assist/server.js @@ -2,6 +2,7 @@ const dumps = require('./utils/HeapSnapshot'); const express = require('express'); const socket = require("./servers/websocket"); const {request_logger} = require("./utils/helper"); +const health = require("./utils/health"); const assert = require('assert').strict; const debug = process.env.debug === "1"; @@ -10,7 +11,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0'; const PORT = process.env.LISTEN_PORT || 9001; 
assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required'); const P_KEY = process.env.ASSIST_KEY; -const PREFIX = process.env.PREFIX || process.env.prefix || `/assist` +const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`; const wsapp = express(); wsapp.use(express.json()); @@ -27,16 +28,9 @@ heapdump && wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); const wsserver = wsapp.listen(PORT, HOST, () => { console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); + wsapp.enable('trust proxy'); socket.start(wsserver); -module.exports = {wsserver}; - -wsapp.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline at end of file +module.exports = {wsserver}; \ No newline at end of file diff --git a/assist/servers/websocket.js b/assist/servers/websocket.js index 4c4a657bb..0fdda85f2 100644 --- a/assist/servers/websocket.js +++ b/assist/servers/websocket.js @@ -45,7 +45,22 @@ const respond = function (res, data) { res.setHeader('Content-Type', 'application/json'); res.end(JSON.stringify({"data": data})); } - +const countSessions = async function () { + let count = 0; + try { + const arr = Array.from(io.sockets.adapter.rooms); + const filtered = arr.filter(room => !room[1].has(room[0])); + for (let i of filtered) { + let {projectKey, sessionId} = extractPeerId(i[0]); + if (projectKey !== null && sessionId !== null) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); let filters = extractPayloadFromRequest(req); @@ -360,6 +375,7 @@ module.exports = { socketConnexionTimeout(io); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git 
a/assist/utils/health.js b/assist/utils/health.js new file mode 100644 index 000000000..d71864e71 --- /dev/null +++ b/assist/utils/health.js @@ -0,0 +1,54 @@ +const express = require('express'); +const socket = require("../servers/websocket"); +const HOST = process.env.LISTEN_HOST || '0.0.0.0'; +const PORT = process.env.HEALTH_PORT || 8888; + + +const {request_logger} = require("./helper"); +const debug = process.env.debug === "1"; +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} + +const check_health = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + respond(res, { + "health": true, + "details": { + "version": process.env.npm_package_version, + "connectedSessions": await socket.countSessions() + } + }); +} + + +const healthApp = express(); +healthApp.use(express.json()); +healthApp.use(express.urlencoded({extended: true})); +healthApp.use(request_logger("[healthApp]")); +healthApp.get(['/'], (req, res) => { + res.statusCode = 200; + res.end("healthApp ok!"); + } +); +healthApp.get('/health', check_health); +healthApp.get('/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); + +const listen_cb = async function () { + console.log(`Health App listening on http://${HOST}:${PORT}`); + console.log('Press Ctrl+C to quit.'); +} + +module.exports = { + healthApp, + PORT, + listen_cb +}; diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 784dfceab..514425ddf 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -32,7 +32,7 @@ else: HEALTH_ENDPOINTS = { "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", - "assist": 
"http://assist-openreplay.app.svc.cluster.local:8888/metrics", + "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", @@ -41,12 +41,11 @@ else: "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", - "peers": "http://peers-openreplay.app.svc.cluster.local:8888/metrics", + "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health", "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", - "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/metrics", - "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", - "utilities": "http://utilities-openreplay.app.svc.cluster.local:8888/metrics", + "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics" } @@ -175,8 +174,7 @@ def get_health(): "quickwit": __check_be_service("quickwit"), "sink": __check_be_service("sink"), "sourcemapreader": __check_be_service("sourcemapreader"), - "storage": __check_be_service("storage"), - "utilities": __check_be_service("utilities") + "storage": __check_be_service("storage") }, # "overall": { # "health": "na", diff --git a/ee/assist/package-lock.json b/ee/assist/package-lock.json index 967198a0e..a94f1d5bb 100644 --- a/ee/assist/package-lock.json +++ b/ee/assist/package-lock.json @@ -1,12 +1,12 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "lockfileVersion": 3, "requires": true, "packages": { "": { 
"name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "@maxmind/geoip2-node": "^3.5.0", @@ -117,9 +117,9 @@ } }, "node_modules/@types/node": { - "version": "18.14.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", - "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" + "version": "18.15.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz", + "integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==" }, "node_modules/accepts": { "version": "1.3.8", diff --git a/ee/assist/package.json b/ee/assist/package.json index 3fcedf03b..4ef88774a 100644 --- a/ee/assist/package.json +++ b/ee/assist/package.json @@ -1,6 +1,6 @@ { "name": "assist-server", - "version": "1.0.0", + "version": "v1.11.0-ee", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/ee/assist/server.js b/ee/assist/server.js index 482ddcd17..414d7ac2c 100644 --- a/ee/assist/server.js +++ b/ee/assist/server.js @@ -1,6 +1,7 @@ const dumps = require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); const express = require('express'); +const health = require("./utils/health"); const assert = require('assert').strict; let socket; @@ -14,7 +15,7 @@ const HOST = process.env.LISTEN_HOST || '0.0.0.0'; const PORT = process.env.LISTEN_PORT || 9001; assert.ok(process.env.ASSIST_KEY, 'The "ASSIST_KEY" environment variable is required'); const P_KEY = process.env.ASSIST_KEY; -const PREFIX = process.env.PREFIX || process.env.prefix || `/assist` +const PREFIX = process.env.PREFIX || process.env.prefix || `/assist`; let debug = process.env.debug === "1"; const heapdump = process.env.heapdump === "1"; @@ -31,18 +32,11 @@ if (process.env.uws !== "true") { ); heapdump && 
wsapp.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); wsapp.use(`${PREFIX}/${P_KEY}`, socket.wsRouter); - wsapp.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } - ); wsapp.enable('trust proxy'); const wsserver = wsapp.listen(PORT, HOST, () => { console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); socket.start(wsserver); @@ -102,13 +96,6 @@ if (process.env.uws !== "true") { uapp.post(`${PREFIX}/${P_KEY}/sockets-live/:projectKey`, uWrapper(socket.handlers.socketsLiveByProject)); uapp.get(`${PREFIX}/${P_KEY}/sockets-live/:projectKey/:sessionId`, uWrapper(socket.handlers.socketsLiveByProject)); - uapp.get('/private/shutdown', (res, req) => { - console.log("Requested shutdown"); - res.writeStatus('200 OK').end("ok!"); - process.kill(1, "SIGTERM"); - } - ); - socket.start(uapp); uapp.listen(HOST, PORT, (token) => { @@ -116,7 +103,7 @@ if (process.env.uws !== "true") { console.warn("port already in use"); } console.log(`WS App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); diff --git a/ee/assist/servers/websocket-cluster.js b/ee/assist/servers/websocket-cluster.js index a1f389685..4618a6184 100644 --- a/ee/assist/servers/websocket-cluster.js +++ b/ee/assist/servers/websocket-cluster.js @@ -83,6 +83,22 @@ const respond = function (res, data) { } } +const countSessions = async function () { + let count = 0; + try { + let rooms = await io.of('/').adapter.allRooms(); + for (let i of rooms) { + let {projectKey, sessionId} = extractPeerId(i); + if (projectKey !== undefined && sessionId !== undefined) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} + const socketsList = async function (req, res) { debug && console.log("[WS]looking 
for all available sessions"); let filters = await extractPayloadFromRequest(req, res); @@ -417,6 +433,7 @@ module.exports = { process.exit(2); }); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git a/ee/assist/servers/websocket.js b/ee/assist/servers/websocket.js index 330361df3..7fb1c9684 100644 --- a/ee/assist/servers/websocket.js +++ b/ee/assist/servers/websocket.js @@ -66,6 +66,23 @@ const respond = function (res, data) { } } +const countSessions = async function () { + let count = 0; + try { + const arr = Array.from(io.sockets.adapter.rooms); + const filtered = arr.filter(room => !room[1].has(room[0])); + for (let i of filtered) { + let {projectKey, sessionId} = extractPeerId(i[0]); + if (projectKey !== null && sessionId !== null) { + count++; + } + } + } catch (e) { + console.error(e); + } + return count; +} + const socketsList = async function (req, res) { debug && console.log("[WS]looking for all available sessions"); let filters = await extractPayloadFromRequest(req, res); @@ -379,6 +396,7 @@ module.exports = { socketConnexionTimeout(io); }, + countSessions, handlers: { socketsList, socketsListByProject, diff --git a/ee/assist/utils/health.js b/ee/assist/utils/health.js new file mode 100644 index 000000000..bcb64f61c --- /dev/null +++ b/ee/assist/utils/health.js @@ -0,0 +1,61 @@ +const express = require('express'); +let socket; +if (process.env.redis === "true") { + socket = require("../servers/websocket-cluster"); +} else { + socket = require("../servers/websocket"); +} +const HOST = process.env.LISTEN_HOST || '0.0.0.0'; +const PORT = process.env.HEALTH_PORT || 8888; + + +const {request_logger} = require("./helper"); +const debug = process.env.debug === "1"; +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} + +const check_health = async function (req, res) { + debug && console.log("[WS]looking for all available 
sessions"); + respond(res, { + "health": true, + "details": { + "version": process.env.npm_package_version, + "connectedSessions": await socket.countSessions(), + "uWebSocket": process.env.uws === "true", + "redis": process.env.redis === "true" + } + }); +} + + +const healthApp = express(); +healthApp.use(express.json()); +healthApp.use(express.urlencoded({extended: true})); +healthApp.use(request_logger("[healthApp]")); +healthApp.get(['/'], (req, res) => { + res.statusCode = 200; + res.end("healthApp ok!"); + } +); +healthApp.get('/health', check_health); +healthApp.get('/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); + +const listen_cb = async function () { + console.log(`Health App listening on http://${HOST}:${PORT}`); + console.log('Press Ctrl+C to quit.'); +} + +module.exports = { + healthApp, + PORT, + listen_cb +}; diff --git a/peers/clean-dev.sh b/peers/clean-dev.sh new file mode 100755 index 000000000..a0cb5c9ed --- /dev/null +++ b/peers/clean-dev.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +rm -rf ./utils \ No newline at end of file diff --git a/peers/package-lock.json b/peers/package-lock.json index 5811b59e3..b18dca820 100644 --- a/peers/package-lock.json +++ b/peers/package-lock.json @@ -1,12 +1,12 @@ { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "express": "^4.18.2", @@ -57,9 +57,9 @@ "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "node_modules/@types/node": { - "version": "18.14.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz", - "integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==" + "version": 
"18.15.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz", + "integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==" }, "node_modules/@types/qs": { "version": "6.9.7", @@ -600,9 +600,9 @@ } }, "node_modules/node-fetch": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.0.tgz", - "integrity": "sha512-BKwRP/O0UvoMKp7GNdwPlObhYGB5DQqwhEDQlNKuoqwVYSxkSZCSbHjnFFmUEtwSKRPU4kNK8PbDYYitwaE3QA==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.1.tgz", + "integrity": "sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow==", "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", @@ -657,9 +657,9 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/peer": { - "version": "1.0.0-rc.10", - "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0-rc.10.tgz", - "integrity": "sha512-S7uMqIAd1tTyvnkj4efdpn8EGc6BM1ONQvLg0vZkrnvA1cTisscBRsx+Jbor6DH68NRLnXgZbiY7/6FDER/GXw==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/peer/-/peer-1.0.0.tgz", + "integrity": "sha512-fPVtyCKZWVfjbf7XnY7MskhTlu+pBpMvQV81sngT8aXIuT5YF9y9bwIw8y5BlI98DV0NsDpLjow/oemFNvcKkg==", "dependencies": { "@types/express": "^4.17.3", "@types/ws": "^7.2.3 || ^8.0.0", @@ -921,9 +921,9 @@ } }, "node_modules/ws": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.12.1.tgz", - "integrity": "sha512-1qo+M9Ba+xNhPB+YTWUlK6M17brTut5EXbcBaMRN5pH5dFrXz7lzz1ChFSUq3bOUl8yEvSenhHmYUNJxFzdJew==", + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", + "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", "engines": { "node": ">=10.0.0" }, diff --git a/peers/package.json b/peers/package.json index 
400274ffc..82fd0ddf6 100644 --- a/peers/package.json +++ b/peers/package.json @@ -1,6 +1,6 @@ { "name": "peers-server", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/peers/prepare-dev.sh b/peers/prepare-dev.sh new file mode 100755 index 000000000..d4825a3d0 --- /dev/null +++ b/peers/prepare-dev.sh @@ -0,0 +1,3 @@ +#!/bin/bash +rsync -avr --exclude=".*" --ignore-existing ../assist/utils ./ +cp ../sourcemap-reader/utils/health.js ./utils/. \ No newline at end of file diff --git a/peers/run-dev.sh b/peers/run-dev.sh new file mode 100755 index 000000000..00e8d5a4b --- /dev/null +++ b/peers/run-dev.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -a +source .env +set +a + +npm start \ No newline at end of file diff --git a/peers/server.js b/peers/server.js index 4e25a0827..8cf12d546 100644 --- a/peers/server.js +++ b/peers/server.js @@ -1,5 +1,6 @@ const dumps = require('./utils/HeapSnapshot'); const {request_logger} = require('./utils/helper'); +const health = require("./utils/health"); const assert = require('assert').strict; const {peerRouter, peerConnection, peerDisconnect, peerError} = require('./servers/peerjs-server'); const express = require('express'); @@ -44,10 +45,4 @@ process.on('uncaughtException', err => { // process.exit(1); }); -app.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline at end of file +health.healthApp.listen(health.PORT, HOST, health.listen_cb); \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql index 13813f5bc..41521a886 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -17,4 +17,9 @@ ALTER TABLE 
public.projects "defaultInputMode": "obscured" }'::jsonb; +ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'mouse_thrashing'; + +ALTER TABLE events.clicks + ADD COLUMN hesitation integer NULL; + COMMIT; \ No newline at end of file diff --git a/sourcemap-reader/.gitignore b/sourcemap-reader/.gitignore index 09c49b304..f2686decf 100644 --- a/sourcemap-reader/.gitignore +++ b/sourcemap-reader/.gitignore @@ -3,5 +3,8 @@ node_modules npm-debug.log .cache test.html -/utils/ +/utils/assistHelper.js +/utils/geoIP.js +/utils/HeapSnapshot.js +/utils/helper.js mappings.wasm diff --git a/sourcemap-reader/clean-dev.sh b/sourcemap-reader/clean-dev.sh index a0cb5c9ed..ebc1c36c6 100755 --- a/sourcemap-reader/clean-dev.sh +++ b/sourcemap-reader/clean-dev.sh @@ -1,3 +1,6 @@ #!/bin/bash -rm -rf ./utils \ No newline at end of file +rm -rf ./utils/assistHelper.js +rm -rf ./utils/geoIP.js +rm -rf ./utils/HeapSnapshot.js +rm -rf ./utils/helper.js \ No newline at end of file diff --git a/sourcemap-reader/package-lock.json b/sourcemap-reader/package-lock.json index e756a0649..1b3f5ec82 100644 --- a/sourcemap-reader/package-lock.json +++ b/sourcemap-reader/package-lock.json @@ -1,12 +1,12 @@ { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "license": "Elastic License 2.0 (ELv2)", "dependencies": { "aws-sdk": "^2.1314.0", @@ -43,9 +43,9 @@ } }, "node_modules/aws-sdk": { - "version": "2.1329.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1329.0.tgz", - "integrity": "sha512-F5M9x/T+PanPiYGiL95atFE6QiwzJWwgPahaEgUdq+qvVAgruiNy5t6nw2B5tBB/yWDPPavHFip3UsXeO0qU3Q==", + "version": "2.1333.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1333.0.tgz", + "integrity": "sha512-MvOuleNeRryJtkCGXGEWDHPqqgxuqdi4/hGzJEpn9tnjsW9LNK8UgFPpYzUZ24ZO/3S+jiUh8DMMrL5nVGnagg==", "dependencies": { "buffer": "4.9.2", 
"events": "1.1.1", diff --git a/sourcemap-reader/package.json b/sourcemap-reader/package.json index 9d5a2806b..5a9b28ef8 100644 --- a/sourcemap-reader/package.json +++ b/sourcemap-reader/package.json @@ -1,6 +1,6 @@ { "name": "sourcemaps-reader", - "version": "1.0.0", + "version": "v1.11.0", "description": "assist server to get live sessions & sourcemaps reader to get stack trace", "main": "peerjs-server.js", "scripts": { diff --git a/sourcemap-reader/server.js b/sourcemap-reader/server.js index 02f63475b..08e3f926f 100644 --- a/sourcemap-reader/server.js +++ b/sourcemap-reader/server.js @@ -1,11 +1,12 @@ const dumps = require('./utils/HeapSnapshot'); const sourcemapsReaderServer = require('./servers/sourcemaps-server'); const express = require('express'); +const health = require("./utils/health"); const {request_logger} = require("./utils/helper"); const HOST = process.env.SMR_HOST || '127.0.0.1'; const PORT = process.env.SMR_PORT || 9000; -const PREFIX = process.env.PREFIX || process.env.prefix || '' +const PREFIX = process.env.PREFIX || process.env.prefix || ''; const P_KEY = process.env.SMR_KEY || 'smr'; const heapdump = process.env.heapdump === "1"; @@ -21,14 +22,7 @@ heapdump && app.use(`${PREFIX}/${P_KEY}/heapdump`, dumps.router); const server = app.listen(PORT, HOST, () => { console.log(`SR App listening on http://${HOST}:${PORT}`); - console.log('Press Ctrl+C to quit.'); + health.healthApp.listen(health.PORT, HOST, health.listen_cb); }); -module.exports = {server}; -app.get('/private/shutdown', (req, res) => { - console.log("Requested shutdown"); - res.statusCode = 200; - res.end("ok!"); - process.kill(1, "SIGTERM"); - } -); \ No newline at end of file +module.exports = {server}; \ No newline at end of file diff --git a/sourcemap-reader/utils/health.js b/sourcemap-reader/utils/health.js new file mode 100644 index 000000000..0b89dd1d8 --- /dev/null +++ b/sourcemap-reader/utils/health.js @@ -0,0 +1,52 @@ +const express = require('express'); +const HOST = 
process.env.LISTEN_HOST || '0.0.0.0'; +const PORT = process.env.HEALTH_PORT || 8888; + + +const {request_logger} = require("./helper"); +const debug = process.env.debug === "1"; +const respond = function (res, data) { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({"data": data})); +} + +const check_health = async function (req, res) { + debug && console.log("[WS]looking for all available sessions"); + respond(res, { + "health": true, + "details": { + "version": process.env.npm_package_version + } + }); +} + + +const healthApp = express(); +healthApp.use(express.json()); +healthApp.use(express.urlencoded({extended: true})); +healthApp.use(request_logger("[healthApp]")); +healthApp.get(['/'], (req, res) => { + res.statusCode = 200; + res.end("healthApp ok!"); + } +); +healthApp.get('/health', check_health); +healthApp.get('/shutdown', (req, res) => { + console.log("Requested shutdown"); + res.statusCode = 200; + res.end("ok!"); + process.kill(1, "SIGTERM"); + } +); + +const listen_cb = async function () { + console.log(`Health App listening on http://${HOST}:${PORT}`); + console.log('Press Ctrl+C to quit.'); +} + +module.exports = { + healthApp, + PORT, + listen_cb +}; From b8ddbee0cf67b85b629453514b1e5cd0d752b0f7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 13 Mar 2023 18:12:39 +0100 Subject: [PATCH 17/60] feat(chalice): health-check changes chore(deployment): changed service ports --- api/chalicelib/core/health.py | 2 -- scripts/helmcharts/openreplay/charts/assist/values.yaml | 1 + scripts/helmcharts/openreplay/charts/peers/values.yaml | 1 + .../helmcharts/openreplay/charts/sourcemapreader/values.yaml | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 0a73661b7..980c7a41b 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -41,7 +41,6 @@ else: "ingress-nginx": 
"http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", "integrations": "http://integrations-openreplay.app.svc.cluster.local:8888/metrics", "peers": "http://peers-openreplay.app.svc.cluster.local:8888/health", - "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", @@ -168,7 +167,6 @@ def get_health(): "ingress-nginx": __always_healthy, "integrations": __check_be_service("integrations"), "peers": __check_be_service("peers"), - "quickwit": __check_be_service("quickwit"), "sink": __check_be_service("sink"), "sourcemapreader": __check_be_service("sourcemapreader"), "storage": __check_be_service("storage") diff --git a/scripts/helmcharts/openreplay/charts/assist/values.yaml b/scripts/helmcharts/openreplay/charts/assist/values.yaml index 8ff07d2d0..5e84e8c60 100644 --- a/scripts/helmcharts/openreplay/charts/assist/values.yaml +++ b/scripts/helmcharts/openreplay/charts/assist/values.yaml @@ -64,6 +64,7 @@ service: type: ClusterIP ports: socketio: 9001 + metrics: 8888 ingress: enabled: true diff --git a/scripts/helmcharts/openreplay/charts/peers/values.yaml b/scripts/helmcharts/openreplay/charts/peers/values.yaml index 0bc4b6b14..0bf7fc27e 100644 --- a/scripts/helmcharts/openreplay/charts/peers/values.yaml +++ b/scripts/helmcharts/openreplay/charts/peers/values.yaml @@ -64,6 +64,7 @@ service: type: ClusterIP ports: peerjs: 9000 + metrics: 8888 ingress: enabled: true diff --git a/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml b/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml index ec9fe9655..d14069fca 100644 --- a/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml +++ b/scripts/helmcharts/openreplay/charts/sourcemapreader/values.yaml @@ -48,6 +48,7 @@ 
service: type: ClusterIP ports: sourcemapreader: 9000 + metrics: 8888 serviceMonitor: enabled: false From e890ff8a14db31a5b612e7e73e7b5b6f7c3b5186 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 11:51:02 +0100 Subject: [PATCH 18/60] feat(chalice): split replay --- api/chalicelib/core/sessions_replay.py | 191 +++++++++++++++++++++++++ api/routers/core_dynamic.py | 40 +++++- 2 files changed, 227 insertions(+), 4 deletions(-) create mode 100644 api/chalicelib/core/sessions_replay.py diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..84c6703eb --- /dev/null +++ b/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,191 @@ +from typing import List + +import schemas +from chalicelib.core import events, metadata, events_ios, \ + sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ + sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper, metrics_helper +from chalicelib.utils import sql_helper as sh + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + 
query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = 
sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else 
''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT s.*, + s.session_id::text AS session_id + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + + if data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + 
data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 3389074bf..cdd57a327 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -6,7 +6,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \ - sessions_favorite, assist, sessions_notes, click_maps + sessions_favorite, assist, sessions_notes, click_maps, sessions_replay from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -145,13 +145,14 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"]) +# for backward compatibility +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"]) async def get_session(projectId: int, 
sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, context=context) + data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -162,6 +163,37 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas } +@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"]) +async def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"]) +async def get_session_events(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) + if data is None: + return {"errors": ["session not found"]} + + return { + 'data': data + } + + @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", 
"sourcemaps"]) async def get_error_trace(projectId: int, sessionId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): From 68dd0a7f146eb85f665dc8b8b7a4fb9755ae13a3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 13:25:14 +0100 Subject: [PATCH 19/60] feat(chalice): split replay --- api/chalicelib/core/sessions.py | 88 +-------- api/chalicelib/core/sessions_favorite.py | 19 +- api/chalicelib/core/sessions_replay.py | 23 +-- api/routers/core_dynamic.py | 4 +- ee/api/chalicelib/core/sessions.py | 91 +--------- ee/api/chalicelib/core/sessions_favorite.py | 27 +-- ee/api/chalicelib/core/sessions_replay.py | 189 ++++++++++++++++++++ ee/api/routers/core_dynamic.py | 50 +++++- 8 files changed, 273 insertions(+), 218 deletions(-) create mode 100644 ee/api/chalicelib/core/sessions_replay.py diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index c95bed903..8f98aac83 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,10 +1,7 @@ from typing import List import schemas -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -33,89 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, - 
group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, 
project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/api/chalicelib/core/sessions_favorite.py b/api/chalicelib/core/sessions_favorite.py index 00228b31f..d3bf5e9b4 100644 --- a/api/chalicelib/core/sessions_favorite.py +++ b/api/chalicelib/core/sessions_favorite.py @@ -1,5 +1,4 @@ import schemas -from chalicelib.core import sessions 
from chalicelib.utils import pg_client @@ -8,11 +7,14 @@ def add_favorite_session(context: schemas.CurrentContext, project_id, session_id cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(session_id)s);""", + VALUES (%(userId)s,%(session_id)s) + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): @@ -21,11 +23,14 @@ def remove_favorite_session(context: schemas.CurrentContext, project_id, session cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(session_id)s;""", + AND session_id = %(session_id)s + RETURNING session_id;""", {"userId": context.user_id, "session_id": session_id}) ) - return sessions.get_by_id2_pg(context=context, project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True) + row = cur.fetchone() + if row: + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas.CurrentContext, project_id, session_id): diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py index 84c6703eb..94e3cc504 100644 --- a/api/chalicelib/core/sessions_replay.py +++ b/api/chalicelib/core/sessions_replay.py @@ -1,12 +1,8 @@ -from typing import List - import schemas from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes from chalicelib.utils 
import errors_helper -from chalicelib.utils import pg_client, helper, metrics_helper -from chalicelib.utils import sql_helper as sh +from chalicelib.utils import pg_client, helper def __group_metadata(session, project_metadata): @@ -148,8 +144,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat def get_events(project_id, session_id): with pg_client.PostgresClient() as cur: query = cur.mogrify( - f"""SELECT s.*, - s.session_id::text AS session_id + f"""SELECT session_id, platform, start_ts, duration FROM public.sessions AS s WHERE s.project_id = %(project_id)s AND s.session_id = %(session_id)s;""", @@ -159,11 +154,11 @@ def get_events(project_id, session_id): # print(query) cur.execute(query=query) - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - - if data["platform"] == 'ios': + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) for e in data['events']: if e["type"].endswith("_IOS"): @@ -183,7 +178,7 @@ def get_events(project_id, session_id): data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, session_id=session_id) data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) + start_ts=s_data["startTs"], duration=s_data["duration"]) data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) return data diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index cdd57a327..c6f71e88f 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -271,8 +271,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas.CurrentContext = Depends(OR_context)): data = 
assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 6d92c3954..8f98aac83 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -1,11 +1,7 @@ from typing import List import schemas -import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, sessions_favorite, \ - sessions_devtool, sessions_notes -from chalicelib.utils import errors_helper +from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite from chalicelib.utils import pg_client, helper, metrics_helper from chalicelib.utils import sql_helper as sh @@ -34,91 +30,6 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, - include_fav_viewed=False, group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - 
extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in 
all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index d8ae4e1f7..85e308756 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -10,13 +10,15 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session cur.execute( cur.mogrify(f"""\ INSERT INTO public.user_favorite_sessions(user_id, session_id) - VALUES (%(userId)s,%(sessionId)s);""", - {"userId": context.user_id, "sessionId": session_id}) + VALUES 
(%(userId)s,%(session_id)s) + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - - sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.add_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): @@ -25,12 +27,15 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess cur.mogrify(f"""\ DELETE FROM public.user_favorite_sessions WHERE user_id = %(userId)s - AND session_id = %(sessionId)s;""", - {"userId": context.user_id, "sessionId": session_id}) + AND session_id = %(session_id)s + RETURNING session_id;""", + {"userId": context.user_id, "session_id": session_id}) ) - sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) - return sessions.get_by_id2_pg(project_id=project_id, session_id=session_id, - full_data=False, include_fav_viewed=True, context=context) + row = cur.fetchone() + if row: + sessions_favorite_exp.remove_favorite_session(project_id=project_id, user_id=context.user_id, session_id=session_id) + return {"data": {"sessionId": session_id}} + return {"errors": ["something went wrong"]} def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py new file mode 100644 index 000000000..798029aee --- /dev/null +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -0,0 +1,189 @@ +import schemas +import schemas_ee +from chalicelib.core import events, metadata, 
events_ios, \ + sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes +from chalicelib.utils import errors_helper +from chalicelib.utils import pg_client, helper + + +def __group_metadata(session, project_metadata): + meta = {} + for m in project_metadata.keys(): + if project_metadata[m] is not None and session.get(m) is not None: + meta[project_metadata[m]] = session[m] + session.pop(m) + return meta + + +# for backward compatibility +def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, + include_fav_viewed=False, group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, + encode(file_key,'hex') AS file_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['events'] = 
events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=data["startTs"], duration=data["duration"]) + + data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, + session_id=session_id, user_id=context.user_id) + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) + data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return 
assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, + group_metadata=False, live=True): + with pg_client.PostgresClient() as cur: + extra_query = [] + if include_fav_viewed: + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_favorite_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS favorite""") + extra_query.append("""COALESCE((SELECT TRUE + FROM public.user_viewed_sessions AS fs + WHERE s.session_id = fs.session_id + AND fs.user_id = %(userId)s), FALSE) AS viewed""") + query = cur.mogrify( + f"""\ + SELECT + s.*, + s.session_id::text AS session_id, + (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key + {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} + {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} + FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id, "userId": context.user_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + data = cur.fetchone() + if data is not None: + data = helper.dict_to_camel_case(data) + if full_data: + if data["platform"] == 'ios': + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + else: + data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) + data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) + data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + + data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), 
session=data) + data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, + project_key=data["projectKey"]) + data["inDB"] = True + return data + elif live: + return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) + else: + return None + + +def get_events(project_id, session_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT session_id, platform, start_ts, duration + FROM public.sessions AS s + WHERE s.project_id = %(project_id)s + AND s.session_id = %(session_id)s;""", + {"project_id": project_id, "session_id": session_id} + ) + # print("===============") + # print(query) + cur.execute(query=query) + + s_data = cur.fetchone() + if s_data is not None: + s_data = helper.dict_to_camel_case(s_data) + data = {} + if s_data["platform"] == 'ios': + data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) + for e in data['events']: + if e["type"].endswith("_IOS"): + e["type"] = e["type"][:-len("_IOS")] + data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, + session_id=session_id) + else: + data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, + group_clickrage=True) + all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) + data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] + # to keep only the first stack + # limit the number of errors to reduce the response-body size + data['errors'] = [errors_helper.format_first_stack_frame(e) for e in all_errors + if e['source'] == "js_exception"][:500] + data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, + start_ts=s_data["startTs"], duration=s_data["duration"]) + + 
data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) + return data + else: + return None diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 8c8aa55b6..209fdbd6d 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -7,7 +7,7 @@ from starlette.responses import RedirectResponse, FileResponse import schemas import schemas_ee from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ - errors_favorite, sessions_notes, click_maps + errors_favorite, sessions_notes, click_maps, sessions_replay from chalicelib.core import sessions_viewed from chalicelib.core import tenants, users, projects, license from chalicelib.core import webhook @@ -59,7 +59,8 @@ async def edit_account(data: schemas_ee.EditUserSchema = Body(...), @app.post('/integrations/slack', tags=['integrations']) @app.put('/integrations/slack', tags=['integrations']) -async def add_slack_client(data: schemas.AddCollaborationSchema, context: schemas.CurrentContext = Depends(OR_context)): +async def add_slack_integration(data: schemas.AddCollaborationSchema, + context: schemas.CurrentContext = Depends(OR_context)): n = Slack.add(tenant_id=context.tenant_id, data=data) if n is None: return { @@ -155,13 +156,15 @@ async def get_projects(context: schemas.CurrentContext = Depends(OR_context)): stack_integrations=True, user_id=context.user_id)} -@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) +# for backward compatibility +@app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) async def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if isinstance(sessionId, str): return {"errors": ["session not found"]} - data = 
sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, - include_fav_viewed=True, group_metadata=True, context=context) + data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): @@ -172,6 +175,39 @@ async def get_session(projectId: int, sessionId: Union[int, str], background_tas } +@app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) +async def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, + include_fav_viewed=True, group_metadata=True, context=context) + if data is None: + return {"errors": ["session not found"]} + if data.get("inDB"): + background_tasks.add_task(sessions_viewed.view_session, project_id=projectId, user_id=context.user_id, + session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"], + dependencies=[OR_scope(Permissions.session_replay)]) +async def get_session_events(projectId: int, sessionId: Union[int, str], + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(sessionId, str): + return {"errors": ["session not found"]} + data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) + if data is None: + return {"errors": ["session not found"]} + + return { + 'data': data + } + + @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], dependencies=[OR_scope(Permissions.dev_tools)]) async def 
get_error_trace(projectId: int, sessionId: int, errorId: str, @@ -250,8 +286,8 @@ async def get_live_session(projectId: int, sessionId: str, background_tasks: Bac context: schemas_ee.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: - data = sessions.get_by_id2_pg(context=context, project_id=projectId, session_id=sessionId, - full_data=True, include_fav_viewed=True, group_metadata=True, live=False) + data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, + full_data=True, include_fav_viewed=True, group_metadata=True, live=False) if data is None: return {"errors": ["session not found"]} if data.get("inDB"): From 0654808f68f87d5d5f4cc9d3d69c5a2c31ef99de Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 14:03:45 +0100 Subject: [PATCH 20/60] feat(chalice): code cleaning --- ee/api/.gitignore | 1 + ee/api/chalicelib/core/sessions.py | 1126 ---------------------------- ee/api/clean-dev.sh | 1 + 3 files changed, 2 insertions(+), 1126 deletions(-) delete mode 100644 ee/api/chalicelib/core/sessions.py diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 9a9636ee1..27ac41f5c 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -215,6 +215,7 @@ Pipfile.lock /chalicelib/core/log_tool_sumologic.py /chalicelib/core/metadata.py /chalicelib/core/mobile.py +/chalicelib/core/sessions.py /chalicelib/core/sessions_assignments.py #exp /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py deleted file mode 100644 index 8f98aac83..000000000 --- a/ee/api/chalicelib/core/sessions.py +++ /dev/null @@ -1,1126 +0,0 @@ -from typing import List - -import schemas -from chalicelib.core import events, metadata, projects, performance_event, sessions_favorite -from chalicelib.utils import pg_client, helper, metrics_helper -from chalicelib.utils 
import sql_helper as sh - -SESSION_PROJECTION_COLS = """s.project_id, -s.session_id::text AS session_id, -s.user_uuid, -s.user_id, -s.user_os, -s.user_browser, -s.user_device, -s.user_device_type, -s.user_country, -s.start_ts, -s.duration, -s.events_count, -s.pages_count, -s.errors_count, -s.user_anonymous_id, -s.platform, -s.issue_score, -to_jsonb(s.issue_types) AS issue_types, -favorite_sessions.session_id NOTNULL AS favorite, -COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ - - -# This function executes the query and return result -def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): - if data.bookmarked: - data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) - - full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, - favorite_only=data.bookmarked, issue=issue, project_id=project_id, - user_id=user_id) - if data.limit is not None and data.page is not None: - full_args["sessions_limit"] = data.limit - full_args["sessions_limit_s"] = (data.page - 1) * data.limit - full_args["sessions_limit_e"] = data.page * data.limit - else: - full_args["sessions_limit"] = 200 - full_args["sessions_limit_s"] = 1 - full_args["sessions_limit_e"] = 200 - - meta_keys = [] - with pg_client.PostgresClient() as cur: - if errors_only: - main_query = cur.mogrify(f"""SELECT DISTINCT er.error_id, - COALESCE((SELECT TRUE - FROM public.user_viewed_errors AS ve - WHERE er.error_id = ve.error_id - AND ve.user_id = %(userId)s LIMIT 1), FALSE) AS viewed - {query_part};""", full_args) - - elif count_only: - main_query = cur.mogrify(f"""SELECT COUNT(DISTINCT s.session_id) AS count_sessions, - COUNT(DISTINCT s.user_uuid) AS count_users - {query_part};""", 
full_args) - elif data.group_by_user: - g_sort = "count(full_sessions)" - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - if data.sort is not None and data.sort != 'sessionsCount': - sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" - else: - sort = 'start_ts' - - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(*) AS count, - COALESCE(JSONB_AGG(users_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT user_id, - count(full_sessions) AS user_sessions_count, - jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, - MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ) AS filtred_sessions - ) AS full_sessions - GROUP BY user_id - ) AS users_sessions;""", - full_args) - elif ids_only: - main_query = cur.mogrify(f"""SELECT DISTINCT ON(s.session_id) s.session_id - {query_part} - ORDER BY s.session_id desc - LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s;""", - full_args) - else: - if data.order is None: - data.order = schemas.SortOrderType.desc.value - else: - data.order = data.order.value - sort = 'session_id' - if data.sort is not None and data.sort != "session_id": - # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) - sort = helper.key_to_snake_case(data.sort) - meta_keys = metadata.get(project_id=project_id) - main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, - COALESCE(JSONB_AGG(full_sessions) - FILTER (WHERE rn>%(sessions_limit_s)s AND 
rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn - FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} - {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if errors_only or ids_only: - return helper.list_to_camel_case(cur.fetchall()) - - sessions = cur.fetchone() - if count_only: - return helper.dict_to_camel_case(sessions) - - total = sessions["count"] - sessions = sessions["sessions"] - - if data.group_by_user: - for i, s in enumerate(sessions): - sessions[i] = {**s.pop("last_session")[0], **s} - sessions[i].pop("rn") - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - else: - for i, s in enumerate(sessions): - sessions[i]["metadata"] = {k["key"]: sessions[i][f'metadata_{k["index"]}'] for k in meta_keys \ - if sessions[i][f'metadata_{k["index"]}'] is not None} - # if not data.group_by_user and data.sort is not None and data.sort != "session_id": - # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], - # reverse=data.order.upper() == "DESC") - return { - 'total': total, - 'sessions': helper.list_to_camel_case(sessions) - } - - -def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, - view_type: schemas.MetricTimeseriesViewType, metric_type: 
schemas.MetricType, - metric_of: schemas.MetricOfTable, metric_value: List): - step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, - density=density, factor=1, decimal=True)) - extra_event = None - if metric_of == schemas.MetricOfTable.visited_url: - extra_event = "events.pages" - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) - full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, - favorite_only=False, issue=None, project_id=project_id, - user_id=None, extra_event=extra_event) - full_args["step_size"] = step_size - sessions = [] - with pg_client.PostgresClient() as cur: - if metric_type == schemas.MetricType.timeseries: - if view_type == schemas.MetricTimeseriesViewType.line_chart: - main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts - {query_part}) - SELECT generated_timestamp AS timestamp, - COUNT(s) AS count - FROM generate_series(%(startDate)s, %(endDate)s, %(step_size)s) AS generated_timestamp - LEFT JOIN LATERAL ( SELECT 1 AS s - FROM full_sessions - WHERE start_ts >= generated_timestamp - AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE) - GROUP BY generated_timestamp - ORDER BY generated_timestamp;""", full_args) - else: - main_query = cur.mogrify(f"""SELECT count(DISTINCT s.session_id) AS count - {query_part};""", full_args) - - # print("--------------------") - # print(main_query) - # print("--------------------") - try: - cur.execute(main_query) - except Exception as err: - print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------") - print(main_query.decode('UTF-8')) - print("--------- PAYLOAD -----------") - print(data.json()) - print("--------------------") - raise err - if view_type == 
schemas.MetricTimeseriesViewType.line_chart: - sessions = cur.fetchall() - else: - sessions = cur.fetchone()["count"] - elif metric_type == schemas.MetricType.table: - if isinstance(metric_of, schemas.MetricOfTable): - main_col = "user_id" - extra_col = "" - extra_where = "" - pre_query = "" - distinct_on = "s.session_id" - if metric_of == schemas.MetricOfTable.user_country: - main_col = "user_country" - elif metric_of == schemas.MetricOfTable.user_device: - main_col = "user_device" - elif metric_of == schemas.MetricOfTable.user_browser: - main_col = "user_browser" - elif metric_of == schemas.MetricOfTable.issues: - main_col = "issue" - extra_col = f", UNNEST(s.issue_types) AS {main_col}" - if len(metric_value) > 0: - extra_where = [] - for i in range(len(metric_value)): - arg_name = f"selected_issue_{i}" - extra_where.append(f"{main_col} = %({arg_name})s") - full_args[arg_name] = metric_value[i] - extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: - main_col = "path" - extra_col = ", path" - distinct_on += ",path" - main_query = cur.mogrify(f"""{pre_query} - SELECT COUNT(*) AS count, COALESCE(JSONB_AGG(users_sessions) FILTER ( WHERE rn <= 200 ), '[]'::JSONB) AS values - FROM (SELECT {main_col} AS name, - count(DISTINCT session_id) AS session_count, - ROW_NUMBER() OVER (ORDER BY count(full_sessions) DESC) AS rn - FROM (SELECT * - FROM (SELECT DISTINCT ON({distinct_on}) s.session_id, s.user_uuid, - s.user_id, s.user_os, - s.user_browser, s.user_device, - s.user_device_type, s.user_country, s.issue_types{extra_col} - {query_part} - ORDER BY s.session_id desc) AS filtred_sessions - ) AS full_sessions - {extra_where} - GROUP BY {main_col} - ORDER BY session_count DESC) AS users_sessions;""", - full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") - cur.execute(main_query) - sessions = cur.fetchone() - for s in sessions["values"]: - s.pop("rn") - sessions["values"] = 
helper.list_to_camel_case(sessions["values"]) - - return sessions - - -def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): - return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, - schemas.EventType.graphql] \ - or event.type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ] and (event.source is None or len(event.source) == 0) \ - or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( - event.filters is None or len(event.filters) == 0)) - - -# this function generates the query and return the generated-query with the dict of query arguments -def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, - project_id, user_id, extra_event=None): - ss_constraints = [] - full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, - "projectId": project_id, "userId": user_id} - extra_constraints = [ - "s.project_id = %(project_id)s", - "s.duration IS NOT NULL" - ] - extra_from = "" - events_query_part = "" - if len(data.filters) > 0: - meta_keys = None - for i, f in enumerate(data.filters): - if not isinstance(f.value, list): - f.value = [f.value] - filter_type = f.type - f.value = helper.values_for_operator(value=f.value, op=f.operator) - f_k = f"f_value{i}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)} - op = sh.get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator - is_any = sh.isAny_opreator(f.operator) - is_undefined = sh.isUndefined_operator(f.operator) - if not is_any and not is_undefined and len(f.value) == 0: - continue - is_not = False - if 
sh.is_negation_operator(f.operator): - is_not = True - if filter_type == schemas.FilterType.user_browser: - if is_any: - extra_constraints.append('s.user_browser IS NOT NULL') - ss_constraints.append('ms.user_browser IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_ios]: - if is_any: - extra_constraints.append('s.user_os IS NOT NULL') - ss_constraints.append('ms.user_os IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_ios]: - if is_any: - extra_constraints.append('s.user_device IS NOT NULL') - ss_constraints.append('ms.user_device IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_ios]: - if is_any: - extra_constraints.append('s.user_country IS NOT NULL') - ss_constraints.append('ms.user_country IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type in [schemas.FilterType.utm_source]: - if is_any: - extra_constraints.append('s.utm_source IS NOT 
NULL') - ss_constraints.append('ms.utm_source IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_source IS NULL') - ss_constraints.append('ms.utm_source IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: - if is_any: - extra_constraints.append('s.utm_medium IS NOT NULL') - ss_constraints.append('ms.utm_medium IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_medium IS NULL') - ss_constraints.append('ms.utm_medium IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: - if is_any: - extra_constraints.append('s.utm_campaign IS NOT NULL') - ss_constraints.append('ms.utm_campaign IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.utm_campaign IS NULL') - ss_constraints.append('ms.utm_campaign IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f's.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, - value_key=f_k)) - - elif filter_type == schemas.FilterType.duration: - if len(f.value) > 0 and f.value[0] is not None: - extra_constraints.append("s.duration >= %(minDuration)s") - ss_constraints.append("ms.duration >= %(minDuration)s") - full_args["minDuration"] = f.value[0] - if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: - extra_constraints.append("s.duration <= %(maxDuration)s") - 
ss_constraints.append("ms.duration <= %(maxDuration)s") - full_args["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: - # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" - if is_any: - extra_constraints.append('s.base_referrer IS NOT NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == events.EventType.METADATA.ui_type: - # get metadata list only if you need it - if meta_keys is None: - meta_keys = metadata.get(project_id=project_id) - meta_keys = {m["key"]: m["index"] for m in meta_keys} - if f.source in meta_keys.keys(): - if is_any: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") - elif is_undefined: - extra_constraints.append(f"s.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - ss_constraints.append(f"ms.{metadata.index_to_colname(meta_keys[f.source])} IS NULL") - else: - extra_constraints.append( - sh.multi_conditions( - f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions( - f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", - f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - if is_any: - extra_constraints.append('s.user_id IS NOT NULL') - ss_constraints.append('ms.user_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_id IS NULL') - ss_constraints.append('ms.user_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, 
is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_ios]: - if is_any: - extra_constraints.append('s.user_anonymous_id IS NOT NULL') - ss_constraints.append('ms.user_anonymous_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.user_anonymous_id IS NULL') - ss_constraints.append('ms.user_anonymous_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: - if is_any: - extra_constraints.append('s.rev_id IS NOT NULL') - ss_constraints.append('ms.rev_id IS NOT NULL') - elif is_undefined: - extra_constraints.append('s.rev_id IS NULL') - ss_constraints.append('ms.rev_id IS NULL') - else: - extra_constraints.append( - sh.multi_conditions(f"s.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.platform: - # op = __ sh.get_sql_operator(f.operator) - extra_constraints.append( - sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - elif filter_type == schemas.FilterType.issue: - if is_any: - extra_constraints.append("array_length(s.issue_types, 1) > 0") - ss_constraints.append("array_length(ms.issue_types, 1) > 0") - else: - extra_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} ANY (s.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"%({f_k})s {op} 
ANY (ms.issue_types)", f.value, is_not=is_not, - value_key=f_k)) - # search sessions with click_rage on a specific selector - if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value: - for j, sf in enumerate(f.filters): - if sf.operator == schemas.IssueFilterOperator._on_selector: - f_k = f"f_value{i}_{j}" - full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)} - extra_constraints += ["mc.timestamp>=%(startDate)s", - "mc.timestamp<=%(endDate)s", - "mis.type='click_rage'", - sh.multi_conditions(f"mc.selector=%({f_k})s", - sf.value, is_not=is_not, - value_key=f_k)] - - extra_from += """INNER JOIN events.clicks AS mc USING(session_id) - INNER JOIN events_common.issues USING (session_id,timestamp) - INNER JOIN public.issues AS mis USING (issue_id)\n""" - - elif filter_type == schemas.FilterType.events_count: - extra_constraints.append( - sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - ss_constraints.append( - sh.multi_conditions(f"ms.events_count {op} %({f_k})s", f.value, is_not=is_not, - value_key=f_k)) - # --------------------------------------------------------------------------- - if len(data.events) > 0: - valid_events_count = 0 - for event in data.events: - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if __is_valid_event(is_any=is_any, event=event): - valid_events_count += 1 - events_query_from = [] - event_index = 0 - or_events = data.events_order == schemas.SearchEventOrder._or - # events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL " - events_joiner = " UNION " if or_events else " INNER JOIN LATERAL " - for i, event in enumerate(data.events): - event_type = event.type - is_any = sh.isAny_opreator(event.operator) - if not isinstance(event.value, list): - event.value = [event.value] - if not __is_valid_event(is_any=is_any, event=event): - continue - op = sh.get_sql_operator(event.operator) - is_not = 
False - if sh.is_negation_operator(event.operator): - is_not = True - op = sh.reverse_sql_operator(op) - if event_index == 0 or or_events: - event_from = "%s INNER JOIN public.sessions AS ms USING (session_id)" - event_where = ["ms.project_id = %(projectId)s", "main.timestamp >= %(startDate)s", - "main.timestamp <= %(endDate)s", "ms.start_ts >= %(startDate)s", - "ms.start_ts <= %(endDate)s", "ms.duration IS NOT NULL"] - if favorite_only and not errors_only: - event_from += "INNER JOIN public.user_favorite_sessions AS fs USING(session_id)" - event_where.append("fs.user_id = %(userId)s") - else: - event_from = "%s" - event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", - "main.session_id=event_0.session_id"] - if data.events_order == schemas.SearchEventOrder._then: - event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp") - e_k = f"e_value{i}" - s_k = e_k + "_source" - if event.type != schemas.PerformanceEventType.time_between_events: - event.value = helper.values_for_operator(value=event.value, op=event.operator) - full_args = {**full_args, - **sh.multi_values(event.value, value_key=e_k), - **sh.multi_values(event.source, value_key=s_k)} - - if event_type == events.EventType.CLICK.ui_type: - event_from = event_from % f"{events.EventType.CLICK.table} AS main " - if not is_any: - if event.operator == schemas.ClickEventExtraOperator._on_selector: - event_where.append( - sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) - else: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK.column} {op} %({e_k})s", event.value, - value_key=e_k)) - - elif event_type == events.EventType.INPUT.ui_type: - event_from = event_from % f"{events.EventType.INPUT.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT.column} {op} %({e_k})s", event.value, - value_key=e_k)) - if event.source is not None and len(event.source) > 0: - 
event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key=f"custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, value_key=f"custom{i}")} - - elif event_type == events.EventType.LOCATION.ui_type: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.CUSTOM.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM.column} {op} %({e_k})s", event.value, - value_key=e_k)) - elif event_type == events.EventType.REQUEST.ui_type: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", event.value, - value_key=e_k)) - # elif event_type == events.event_type.GRAPHQL.ui_type: - # event_from = event_from % f"{events.event_type.GRAPHQL.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.GRAPHQL.column} {op} %({e_k})s", event.value, - # value_key=e_k)) - elif event_type == events.EventType.STATEACTION.ui_type: - event_from = event_from % f"{events.EventType.STATEACTION.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.STATEACTION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR.ui_type: - event_from = event_from % f"{events.EventType.ERROR.table} AS main INNER JOIN public.errors AS main1 USING(error_id)" - event.source = list(set(event.source)) - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.message {op} %({e_k})s OR main1.name {op} %({e_k})s)", - 
event.value, value_key=e_k)) - if len(event.source) > 0 and event.source[0] not in [None, "*", ""]: - event_where.append(sh.multi_conditions(f"main1.source = %({s_k})s", event.source, value_key=s_k)) - - - # ----- IOS - elif event_type == events.EventType.CLICK_IOS.ui_type: - event_from = event_from % f"{events.EventType.CLICK_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CLICK_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - - elif event_type == events.EventType.INPUT_IOS.ui_type: - event_from = event_from % f"{events.EventType.INPUT_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.INPUT_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - if event.source is not None and len(event.source) > 0: - event_where.append(sh.multi_conditions(f"main.value ILIKE %(custom{i})s", event.source, - value_key="custom{i}")) - full_args = {**full_args, **sh.multi_values(event.source, f"custom{i}")} - elif event_type == events.EventType.VIEW_IOS.ui_type: - event_from = event_from % f"{events.EventType.VIEW_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.VIEW_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.CUSTOM_IOS.ui_type: - event_from = event_from % f"{events.EventType.CUSTOM_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.CUSTOM_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.REQUEST_IOS.ui_type: - event_from = event_from % f"{events.EventType.REQUEST_IOS.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST_IOS.column} {op} %({e_k})s", - event.value, value_key=e_k)) - elif event_type == events.EventType.ERROR_IOS.ui_type: - event_from = event_from % 
f"{events.EventType.ERROR_IOS.table} AS main INNER JOIN public.crashes_ios AS main1 USING(crash_id)" - if not is_any and event.value not in [None, "*", ""]: - event_where.append( - sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", - event.value, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.fetch_failed: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k})s", - event.value, value_key=e_k)) - col = performance_event.get_col(event_type) - colname = col["column"] - event_where.append(f"main.{colname} = FALSE") - # elif event_type == schemas.PerformanceEventType.fetch_duration: - # event_from = event_from % f"{events.event_type.REQUEST.table} AS main " - # if not is_any: - # event_where.append( - # _multiple_conditions(f"main.{events.event_type.REQUEST.column} {op} %({e_k})s", - # event.value, value_key=e_k)) - # col = performance_event.get_col(event_type) - # colname = col["column"] - # tname = "main" - # e_k += "_custom" - # full_args = {**full_args, **_ sh.multiple_values(event.source, value_key=e_k)} - # event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", - # event.source, value_key=e_k)) - elif event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage - ]: - event_from = event_from % f"{events.EventType.LOCATION.table} AS main " - col = performance_event.get_col(event_type) - colname = col["column"] - tname = "main" - if col.get("extraJoin") is not None: - tname = "ej" - event_from += f" INNER JOIN {col['extraJoin']} AS {tname} 
USING(session_id)" - event_where += [f"{tname}.timestamp >= main.timestamp", f"{tname}.timestamp >= %(startDate)s", - f"{tname}.timestamp <= %(endDate)s"] - if not is_any: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.LOCATION.column} {op} %({e_k})s", - event.value, value_key=e_k)) - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - - event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " - if not isinstance(event.value[0].value, list): - event.value[0].value = [event.value[0].value] - if not isinstance(event.value[1].value, list): - event.value[1].value = [event.value[1].value] - event.value[0].value = helper.values_for_operator(value=event.value[0].value, - op=event.value[0].operator) - event.value[1].value = helper.values_for_operator(value=event.value[1].value, - op=event.value[0].operator) - e_k1 = e_k + "_e1" - e_k2 = e_k + "_e2" - full_args = {**full_args, - **sh.multi_values(event.value[0].value, value_key=e_k1), - **sh.multi_values(event.value[1].value, value_key=e_k2)} - s_op = sh.get_sql_operator(event.value[0].operator) - event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"] - if event_index > 0 and not or_events: - event_where.append("main2.session_id=event_0.session_id") - is_any = sh.isAny_opreator(event.value[0].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", - event.value[0].value, value_key=e_k1)) - s_op = 
sh.get_sql_operator(event.value[1].operator) - is_any = sh.isAny_opreator(event.value[1].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", - event.value[1].value, value_key=e_k2)) - - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) - - elif event_type == schemas.EventType.request_details: - event_from = event_from % f"{events.EventType.REQUEST.table} AS main " - apply = False - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_fetch{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.FetchFilterType._url: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", - f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._status_code: - event_where.append( - sh.multi_conditions(f"main.status_code {f.operator.value} %({e_k_f})s::integer", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._duration: - event_where.append( - sh.multi_conditions(f"main.duration {f.operator.value} %({e_k_f})s::integer", f.value, - value_key=e_k_f)) - apply = True - elif f.type == schemas.FetchFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - elif f.type == 
schemas.FetchFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, - value_key=e_k_f)) - apply = True - else: - print(f"undefined FETCH filter: {f.type}") - if not apply: - continue - elif event_type == schemas.EventType.graphql: - event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " - for j, f in enumerate(event.filters): - is_any = sh.isAny_opreator(f.operator) - if is_any or len(f.value) == 0: - continue - f.value = helper.values_for_operator(value=f.value, op=f.operator) - op = sh.get_sql_operator(f.operator) - e_k_f = e_k + f"_graphql{j}" - full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.GraphqlFilterType._name: - event_where.append( - sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, - value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._method: - event_where.append( - sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._request_body: - event_where.append( - sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._response_body: - event_where.append( - sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - else: - print(f"undefined GRAPHQL filter: {f.type}") - else: - continue - if event_index == 0 or or_events: - event_where += ss_constraints - if is_not: - if event_index == 0 or or_events: - events_query_from.append(f"""\ - (SELECT - session_id, - 0 AS timestamp - FROM sessions - WHERE EXISTS(SELECT session_id - FROM {event_from} - WHERE {" AND ".join(event_where)} - AND sessions.session_id=ms.session_id) IS FALSE - AND project_id = %(projectId)s - AND start_ts >= %(startDate)s - AND start_ts <= %(endDate)s - AND duration IS NOT NULL - ) {"" if or_events else (f"AS event_{event_index}" + ("ON(TRUE)" if 
event_index > 0 else ""))}\ - """) - else: - events_query_from.append(f"""\ - (SELECT - event_0.session_id, - event_{event_index - 1}.timestamp AS timestamp - WHERE EXISTS(SELECT session_id FROM {event_from} WHERE {" AND ".join(event_where)}) IS FALSE - ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ - """) - else: - events_query_from.append(f"""\ - (SELECT main.session_id, {"MIN" if event_index < (valid_events_count - 1) else "MAX"}(main.timestamp) AS timestamp - FROM {event_from} - WHERE {" AND ".join(event_where)} - GROUP BY 1 - ) {"" if or_events else (f"AS event_{event_index} " + ("ON(TRUE)" if event_index > 0 else ""))}\ - """) - event_index += 1 - if event_index > 0: - if or_events: - events_query_part = f"""SELECT - session_id, - MIN(timestamp) AS first_event_ts, - MAX(timestamp) AS last_event_ts - FROM ({events_joiner.join(events_query_from)}) AS u - GROUP BY 1""" - else: - events_query_part = f"""SELECT - event_0.session_id, - MIN(event_0.timestamp) AS first_event_ts, - MAX(event_{event_index - 1}.timestamp) AS last_event_ts - FROM {events_joiner.join(events_query_from)} - GROUP BY 1""" - else: - data.events = [] - # --------------------------------------------------------------------------- - if data.startDate is not None: - extra_constraints.append("s.start_ts >= %(startDate)s") - if data.endDate is not None: - extra_constraints.append("s.start_ts <= %(endDate)s") - # if data.platform is not None: - # if data.platform == schemas.PlatformType.mobile: - # extra_constraints.append(b"s.user_os in ('Android','BlackBerry OS','iOS','Tizen','Windows Phone')") - # elif data.platform == schemas.PlatformType.desktop: - # extra_constraints.append( - # b"s.user_os in ('Chrome OS','Fedora','Firefox OS','Linux','Mac OS X','Ubuntu','Windows')") - - if errors_only: - extra_from += f" INNER JOIN {events.EventType.ERROR.table} AS er USING (session_id) INNER JOIN public.errors AS ser USING (error_id)" - extra_constraints.append("ser.source = 
'js_exception'") - extra_constraints.append("ser.project_id = %(project_id)s") - # if error_status != schemas.ErrorStatus.all: - # extra_constraints.append("ser.status = %(error_status)s") - # full_args["error_status"] = error_status - # if favorite_only: - # extra_from += " INNER JOIN public.user_favorite_errors AS ufe USING (error_id)" - # extra_constraints.append("ufe.user_id = %(userId)s") - - if favorite_only and not errors_only and user_id is not None: - extra_from += """INNER JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - elif not favorite_only and not errors_only and user_id is not None: - extra_from += """LEFT JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id)""" - extra_join = "" - if issue is not None: - extra_join = """ - INNER JOIN LATERAL(SELECT TRUE FROM events_common.issues INNER JOIN public.issues AS p_issues USING (issue_id) - WHERE issues.session_id=f.session_id - AND p_issues.type=%(issue_type)s - AND p_issues.context_string=%(issue_contextString)s - AND timestamp >= f.first_event_ts - AND timestamp <= f.last_event_ts) AS issues ON(TRUE) - """ - full_args["issue_contextString"] = issue["contextString"] - full_args["issue_type"] = issue["type"] - if extra_event: - extra_join += f"""INNER JOIN {extra_event} AS ev USING(session_id)""" - extra_constraints.append("ev.timestamp>=%(startDate)s") - extra_constraints.append("ev.timestamp<=%(endDate)s") - query_part = f"""\ - FROM {f"({events_query_part}) AS f" if len(events_query_part) > 0 else "public.sessions AS s"} - {extra_join} - {"INNER JOIN public.sessions AS s USING(session_id)" if len(events_query_part) > 0 else ""} - {extra_from} - WHERE - {" AND ".join(extra_constraints)}""" - return full_args, query_part - - -def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): - if project_id is 
None: - all_projects = projects.get_projects(tenant_id=tenant_id, recording_state=False) - else: - all_projects = [ - projects.get_project(tenant_id=tenant_id, project_id=int(project_id), include_last_session=False, - include_gdpr=False)] - - all_projects = {int(p["projectId"]): p["name"] for p in all_projects} - project_ids = list(all_projects.keys()) - - available_keys = metadata.get_keys_by_projects(project_ids) - for i in available_keys: - available_keys[i]["user_id"] = schemas.FilterType.user_id - available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id - results = {} - for i in project_ids: - if m_key not in available_keys[i].values(): - available_keys.pop(i) - results[i] = {"total": 0, "sessions": [], "missingMetadata": True} - project_ids = list(available_keys.keys()) - if len(project_ids) > 0: - with pg_client.PostgresClient() as cur: - sub_queries = [] - for i in project_ids: - col_name = list(available_keys[i].keys())[list(available_keys[i].values()).index(m_key)] - sub_queries.append(cur.mogrify( - f"(SELECT COALESCE(COUNT(s.*)) AS count FROM public.sessions AS s WHERE s.project_id = %(id)s AND s.{col_name} = %(value)s) AS \"{i}\"", - {"id": i, "value": m_value}).decode('UTF-8')) - query = f"""SELECT {", ".join(sub_queries)};""" - cur.execute(query=query) - - rows = cur.fetchone() - - sub_queries = [] - for i in rows.keys(): - results[i] = {"total": rows[i], "sessions": [], "missingMetadata": False, "name": all_projects[int(i)]} - if rows[i] > 0: - col_name = list(available_keys[int(i)].keys())[list(available_keys[int(i)].values()).index(m_key)] - sub_queries.append( - cur.mogrify( - f"""( - SELECT * - FROM ( - SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s LEFT JOIN (SELECT session_id - FROM public.user_favorite_sessions - WHERE user_favorite_sessions.user_id = %(userId)s - ) AS favorite_sessions USING (session_id) - WHERE s.project_id = %(id)s AND s.duration IS 
NOT NULL AND s.{col_name} = %(value)s - ) AS full_sessions - ORDER BY favorite DESC, issue_score DESC - LIMIT 10 - )""", - {"id": i, "value": m_value, "userId": user_id}).decode('UTF-8')) - if len(sub_queries) > 0: - cur.execute("\nUNION\n".join(sub_queries)) - rows = cur.fetchall() - for i in rows: - results[str(i["project_id"])]["sessions"].append(helper.dict_to_camel_case(i)) - return results - - -def get_user_sessions(project_id, user_id, start_date, end_date): - with pg_client.PostgresClient() as cur: - constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] - if start_date is not None: - constraints.append("s.start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("s.start_ts <= %(endDate)s") - - query_part = f"""\ - FROM public.sessions AS s - WHERE {" AND ".join(constraints)}""" - - cur.execute(cur.mogrify(f"""\ - SELECT s.project_id, - s.session_id::text AS session_id, - s.user_uuid, - s.user_id, - s.user_os, - s.user_browser, - s.user_device, - s.user_country, - s.start_ts, - s.duration, - s.events_count, - s.pages_count, - s.errors_count - {query_part} - ORDER BY s.session_id - LIMIT 50;""", { - "projectId": project_id, - "userId": user_id, - "startDate": start_date, - "endDate": end_date - })) - - sessions = cur.fetchall() - return helper.list_to_camel_case(sessions) - - -def get_session_user(project_id, user_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT - user_id, - count(*) as session_count, - max(start_ts) as last_seen, - min(start_ts) as first_seen - FROM - "public".sessions - WHERE - project_id = %(project_id)s - AND user_id = %(userId)s - AND duration is not null - GROUP BY user_id; - """, - {"project_id": project_id, "userId": user_id} - ) - cur.execute(query=query) - data = cur.fetchone() - return helper.dict_to_camel_case(data) - - -def get_session_ids_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """\ - SELECT 
session_id FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(userId)s;""", - {"project_id": project_id, "userId": tuple(user_ids)} - ) - ids = cur.execute(query=query) - return ids - - -def delete_sessions_by_session_ids(session_ids): - with pg_client.PostgresClient(unlimited_query=True) as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - session_id IN %(session_ids)s;""", - {"session_ids": tuple(session_ids)} - ) - cur.execute(query=query) - - return True - - -def delete_sessions_by_user_ids(project_id, user_ids): - with pg_client.PostgresClient(unlimited_query=True) as cur: - query = cur.mogrify( - """\ - DELETE FROM public.sessions - WHERE - project_id = %(project_id)s AND user_id IN %(userId)s;""", - {"project_id": project_id, "userId": tuple(user_ids)} - ) - cur.execute(query=query) - - return True - - -def count_all(): - with pg_client.PostgresClient(unlimited_query=True) as cur: - cur.execute(query="SELECT COUNT(session_id) AS count FROM public.sessions") - row = cur.fetchone() - return row.get("count", 0) if row else 0 - - -def session_exists(project_id, session_id): - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""SELECT 1 - FROM public.sessions - WHERE session_id=%(session_id)s - AND project_id=%(project_id)s - LIMIT 1;""", - {"project_id": project_id, "session_id": session_id}) - cur.execute(query) - row = cur.fetchone() - return row is not None diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index 9241b8e48..a160cf9c2 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -35,6 +35,7 @@ rm -rf ./chalicelib/core/log_tool_stackdriver.py rm -rf ./chalicelib/core/log_tool_sumologic.py rm -rf ./chalicelib/core/metadata.py rm -rf ./chalicelib/core/mobile.py +rm -rf ./chalicelib/core/sessions.py rm -rf ./chalicelib/core/sessions_assignments.py #exp rm -rf ./chalicelib/core/sessions_metas.py rm -rf ./chalicelib/core/sessions_mobs.py From 0fe1adf522c24980d1272100815f75d2f99e180d 
Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 14:06:59 +0100 Subject: [PATCH 21/60] feat(chalice): code cleaning --- ee/api/chalicelib/core/sessions_exp.py | 95 +---------------------- ee/api/chalicelib/core/sessions_replay.py | 2 + 2 files changed, 4 insertions(+), 93 deletions(-) diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index f60090ed4..888800681 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -2,11 +2,8 @@ from typing import List, Union import schemas import schemas_ee -from chalicelib.core import events, metadata, events_ios, \ - sessions_mobs, issues, projects, resources, assist, performance_event, metrics, sessions_devtool, \ - sessions_notes -from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper, errors_helper -from chalicelib.utils import sql_helper as sh +from chalicelib.core import events, metadata, projects, performance_event, metrics +from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper SESSION_PROJECTION_COLS_CH = """\ s.project_id, @@ -51,94 +48,6 @@ SESSION_PROJECTION_COLS_CH_MAP = """\ """ -def __group_metadata(session, project_metadata): - meta = {} - for m in project_metadata.keys(): - if project_metadata[m] is not None and session.get(m) is not None: - meta[project_metadata[m]] = session[m] - session.pop(m) - return meta - - -# This function should not use Clickhouse because it doesn't have `file_key` -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, - group_metadata=False, live=True): - with pg_client.PostgresClient() as cur: - extra_query = [] - if include_fav_viewed: - extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_favorite_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS favorite""") - 
extra_query.append("""COALESCE((SELECT TRUE - FROM public.user_viewed_sessions AS fs - WHERE s.session_id = fs.session_id - AND fs.user_id = %(userId)s), FALSE) AS viewed""") - query = cur.mogrify( - f"""\ - SELECT - s.*, - s.session_id::text AS session_id, - (SELECT project_key FROM public.projects WHERE project_id = %(project_id)s LIMIT 1) AS project_key, - encode(file_key,'hex') AS file_key - {"," if len(extra_query) > 0 else ""}{",".join(extra_query)} - {(",json_build_object(" + ",".join([f"'{m}',p.{m}" for m in metadata.column_names()]) + ") AS project_metadata") if group_metadata else ''} - FROM public.sessions AS s {"INNER JOIN public.projects AS p USING (project_id)" if group_metadata else ""} - WHERE s.project_id = %(project_id)s - AND s.session_id = %(session_id)s;""", - {"project_id": project_id, "session_id": session_id, "userId": context.user_id} - ) - # print("===============") - # print(query) - cur.execute(query=query) - - data = cur.fetchone() - if data is not None: - data = helper.dict_to_camel_case(data) - if full_data: - if data["platform"] == 'ios': - data['events'] = events_ios.get_by_sessionId(project_id=project_id, session_id=session_id) - for e in data['events']: - if e["type"].endswith("_IOS"): - e["type"] = e["type"][:-len("_IOS")] - data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) - else: - data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, - group_clickrage=True) - all_errors = events.get_errors_by_session_id(session_id=session_id, project_id=project_id) - data['stackEvents'] = [e for e in all_errors if e['source'] != "js_exception"] - # to keep only the first stack - # limit the number of errors to reduce the response-body size - data['errors'] = [errors_helper.format_first_stack_frame(e) for e in 
all_errors - if e['source'] == "js_exception"][:500] - data['userEvents'] = events.get_customs_by_session_id(project_id=project_id, - session_id=session_id) - data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) - data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id, - context=context) - data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id, - start_ts=data["startTs"], - duration=data["duration"]) - - data['notes'] = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=project_id, - session_id=session_id, user_id=context.user_id) - data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) - data['issues'] = issues.get_by_session_id(session_id=session_id, project_id=project_id) - data['live'] = live and assist.is_live(project_id=project_id, - session_id=session_id, - project_key=data["projectKey"]) - data["inDB"] = True - return data - elif live: - return assist.get_live_session_by_id(project_id=project_id, session_id=session_id) - else: - return None - - def __get_sql_operator(op: schemas.SearchEventOperator): return { schemas.SearchEventOperator._is: "=", diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py index 798029aee..993855637 100644 --- a/ee/api/chalicelib/core/sessions_replay.py +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -16,6 +16,7 @@ def __group_metadata(session, project_metadata): # for backward compatibility +# This function should not use Clickhouse because it doesn't have `file_key` def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: @@ -92,6 +93,7 @@ def get_by_id2_pg(project_id, session_id, context: 
schemas_ee.CurrentContext, fu return None +# This function should not use Clickhouse because it doesn't have `file_key` def get_replay(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: From 11af70852008fa452e458fa23606db5a453481f1 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 15:00:18 +0100 Subject: [PATCH 22/60] feat(chalice): dynamic health-check endpoint --- api/chalicelib/core/tenants.py | 4 ++-- api/chalicelib/utils/pg_client.py | 9 ++++++--- api/routers/subs/health.py | 17 ++++++++--------- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index 5479178d8..4d95ae491 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -68,7 +68,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): return edit_client(tenant_id=tenant_id, changes=changes) -def tenants_exists(): - with pg_client.PostgresClient() as cur: +def tenants_exists(use_pool=True): + with pg_client.PostgresClient(use_pool=use_pool) as cur: cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)") return cur.fetchone()["exists"] diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index 4cfd8b0e3..64ca1719f 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -87,9 +87,10 @@ class PostgresClient: long_query = False unlimited_query = False - def __init__(self, long_query=False, unlimited_query=False): + def __init__(self, long_query=False, unlimited_query=False, use_pool=True): self.long_query = long_query self.unlimited_query = unlimited_query + self.use_pool = use_pool if unlimited_query: long_config = dict(_PG_CONFIG) long_config["application_name"] += "-UNLIMITED" @@ -100,7 +101,7 @@ class PostgresClient: long_config["options"] = f"-c statement_timeout=" \ 
f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}" self.connection = psycopg2.connect(**long_config) - elif not config('PG_POOL', cast=bool, default=True): + elif not use_pool or not config('PG_POOL', cast=bool, default=True): single_config = dict(_PG_CONFIG) single_config["application_name"] += "-NOPOOL" single_config["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}" @@ -120,11 +121,12 @@ class PostgresClient: try: self.connection.commit() self.cursor.close() - if self.long_query or self.unlimited_query: + if not self.use_pool or self.long_query or self.unlimited_query: self.connection.close() except Exception as error: logging.error("Error while committing/closing PG-connection", error) if str(error) == "connection already closed" \ + and self.use_pool \ and not self.long_query \ and not self.unlimited_query \ and config('PG_POOL', cast=bool, default=True): @@ -134,6 +136,7 @@ class PostgresClient: raise error finally: if config('PG_POOL', cast=bool, default=True) \ + and self.use_pool \ and not self.long_query \ and not self.unlimited_query: postgreSQL_pool.putconn(self.connection) diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py index 6655f2a20..5e3c10f07 100644 --- a/api/routers/subs/health.py +++ b/api/routers/subs/health.py @@ -1,15 +1,14 @@ -from typing import Union - -from fastapi import Body, Depends, Request - -import schemas -from chalicelib.core import health -from or_dependencies import OR_context +from chalicelib.core import health, tenants from routers.base import get_routers public_app, app, app_apikey = get_routers() +health_router = public_app -@public_app.get('/health', tags=["dashboard"]) -def get_global_health(): +if tenants.tenants_exists(use_pool=False): + health_router = app + + +@health_router.get('/health', tags=["health-check"]) +def get_global_health_status(): return {"data": health.get_health()} From 8e5ae800d5071fd6fa9178a9d4ba0e14e3567337 Mon Sep 17 00:00:00 2001 
From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 15:15:48 +0100 Subject: [PATCH 23/60] feat(chalice): upgraded startup/shutdown logic --- api/app.py | 77 ++++++++++++++++--------------- api/requirements-alerts.txt | 2 +- api/requirements.txt | 2 +- ee/api/chalicelib/core/tenants.py | 6 +-- ee/api/requirements-alerts.txt | 2 +- ee/api/requirements.txt | 2 +- 6 files changed, 48 insertions(+), 43 deletions(-) diff --git a/api/app.py b/api/app.py index 883cf6704..50cd7342f 100644 --- a/api/app.py +++ b/api/app.py @@ -1,4 +1,5 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config @@ -14,7 +15,40 @@ from routers.crons import core_crons from routers.crons import core_dynamic_crons from routers.subs import insights, metrics, v1_api, health -app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup + logging.info(">>>>> starting up <<<<<") + ap_logger = logging.getLogger('apscheduler') + ap_logger.setLevel(loglevel) + + app.schedule = AsyncIOScheduler() + await pg_client.init() + app.schedule.start() + + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + # App listening + yield + + # Shutdown + logging.info(">>>>> shutting down <<<<<") + app.schedule.shutdown(wait=False) + await pg_client.terminate() + + +app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""), + lifespan=lifespan) 
app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -55,38 +89,9 @@ app.include_router(health.public_app) app.include_router(health.app) app.include_router(health.app_apikey) -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): - logging.info(">>>>> starting up <<<<<") - await pg_client.init() - app.schedule.start() - - for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: - app.schedule.add_job(id=job["func"].__name__, **job) - - ap_logger.info(">Scheduled jobs:") - for job in app.schedule.get_jobs(): - ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) - - -@app.on_event("shutdown") -async def shutdown(): - logging.info(">>>>> shutting down <<<<<") - app.schedule.shutdown(wait=False) - await pg_client.terminate() - - -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) +# @app.get('/private/shutdown', tags=["private"]) +# async def stop_server(): +# logging.info("Requested shutdown") +# await shutdown() +# import os, signal +# os.kill(1, signal.SIGTERM) diff --git a/api/requirements-alerts.txt b/api/requirements-alerts.txt index b208d28c2..edb644c87 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff --git a/api/requirements.txt b/api/requirements.txt index 4a8d35090..27b95f17e 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff 
--git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index 30a87bd29..7ea621007 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -51,7 +51,7 @@ def get_by_api_key(api_key): WHERE tenants.api_key = %(api_key)s AND tenants.deleted_at ISNULL LIMIT 1;""", - {"api_key": api_key}) + {"api_key": api_key}) cur.execute(query=query) return helper.dict_to_camel_case(cur.fetchone()) @@ -94,7 +94,7 @@ def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): return edit_client(tenant_id=tenant_id, changes=changes) -def tenants_exists(): - with pg_client.PostgresClient() as cur: +def tenants_exists(use_pool=True): + with pg_client.PostgresClient(use_pool=use_pool) as cur: cur.execute(f"SELECT EXISTS(SELECT 1 FROM public.tenants)") return cur.fetchone()["exists"] diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index 250882623..6b6901ca5 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 9ce06fe06..cad05e873 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.92.0 +fastapi==0.94.1 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 From 8b6ebbe81511c8331111b97b1281b5bcdcff8594 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 16:27:40 +0100 Subject: [PATCH 24/60] feat(chalice): upgraded startup/shutdown logic feat(alerts): health-check endpoint --- api/app_alerts.py | 64 ++++++++++++++------------- api/chalicelib/core/health.py | 2 +- api/entrypoint.sh | 2 +- api/entrypoint_alerts.sh | 2 +- api/run-alerts-dev.sh | 2 +- ee/api/.gitignore | 2 + ee/api/app.py | 81 +++++++++++++++++------------------ ee/api/clean-dev.sh | 2 + ee/api/entrypoint.sh | 2 +- 
ee/api/entrypoint_alerts.sh | 2 +- ee/api/run-dev.sh | 3 -- 11 files changed, 83 insertions(+), 81 deletions(-) delete mode 100755 ee/api/run-dev.sh diff --git a/api/app_alerts.py b/api/app_alerts.py index 111bad2a1..02147ef23 100644 --- a/api/app_alerts.py +++ b/api/app_alerts.py @@ -1,33 +1,17 @@ import logging +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config from fastapi import FastAPI -from chalicelib.utils import pg_client from chalicelib.core import alerts_processor - -app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) -logging.info("============= ALERTS =============") +from chalicelib.utils import pg_client -@app.get("/") -async def root(): - return {"status": "Running"} - - -app.schedule = AsyncIOScheduler() - -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() - - -@app.on_event("startup") -async def startup(): +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup logging.info(">>>>> starting up <<<<<") await pg_client.init() app.schedule.start() @@ -39,24 +23,44 @@ async def startup(): for job in app.schedule.get_jobs(): ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + # App listening + yield -@app.on_event("shutdown") -async def shutdown(): + # Shutdown logging.info(">>>>> shutting down <<<<<") app.schedule.shutdown(wait=False) await pg_client.terminate() -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) +app = FastAPI(root_path="/alerts", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", 
default=""), + lifespan=lifespan) +logging.info("============= ALERTS =============") +@app.get("/") +async def root(): + return {"status": "Running"} + + +@app.get("/health") +async def get_health_status(): + return {"data": { + "health": True, + "details": {"version": config("version_number", default="unknown")} + }} + + +app.schedule = AsyncIOScheduler() + +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) +app.schedule = AsyncIOScheduler() + if config("LOCAL_DEV", default=False, cast=bool): - @app.get('/private/trigger', tags=["private"]) + @app.get('/trigger', tags=["private"]) async def trigger_main_cron(): logging.info("Triggering main cron") alerts_processor.process() diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 980c7a41b..6f236a0f5 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -29,7 +29,7 @@ if config("LOCAL_DEV", cast=bool, default=False): else: HEALTH_ENDPOINTS = { - "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", + "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/health", "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", diff --git a/api/entrypoint.sh b/api/entrypoint.sh index e140268ef..401046526 100755 --- a/api/entrypoint.sh +++ b/api/entrypoint.sh @@ -1,3 +1,3 @@ #!/bin/sh -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers +uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers diff --git a/api/entrypoint_alerts.sh b/api/entrypoint_alerts.sh index dedfa102b..9ac93dd60 100755 --- a/api/entrypoint_alerts.sh +++ b/api/entrypoint_alerts.sh @@ -1,3 +1,3 @@ #!/bin/sh export 
ASSIST_KEY=ignore -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload +uvicorn app:app --host 0.0.0.0 --port 8888 diff --git a/api/run-alerts-dev.sh b/api/run-alerts-dev.sh index 54db30171..309356133 100755 --- a/api/run-alerts-dev.sh +++ b/api/run-alerts-dev.sh @@ -1,3 +1,3 @@ #!/bin/zsh -uvicorn app_alerts:app --reload \ No newline at end of file +uvicorn app_alerts:app --reload --port 8888 \ No newline at end of file diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 27ac41f5c..1e342e8bc 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -265,6 +265,8 @@ Pipfile.lock /app_alerts.py /build_alerts.sh /build_crons.sh +/run-dev.sh +/run-alerts-dev.sh /routers/subs/health.py /routers/subs/v1_api.py #exp /chalicelib/core/dashboards.py diff --git a/ee/api/app.py b/ee/api/app.py index 407e4aa5b..9104a2db4 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -1,5 +1,6 @@ import logging import queue +from contextlib import asynccontextmanager from apscheduler.schedulers.asyncio import AsyncIOScheduler from decouple import config @@ -10,9 +11,9 @@ from starlette import status from starlette.responses import StreamingResponse, JSONResponse from chalicelib.core import traces +from chalicelib.utils import events_queue from chalicelib.utils import helper from chalicelib.utils import pg_client -from chalicelib.utils import events_queue from routers import core, core_dynamic, ee, saml from routers.crons import core_crons from routers.crons import core_dynamic_crons @@ -20,7 +21,43 @@ from routers.crons import ee_crons from routers.subs import insights, metrics, v1_api_ee from routers.subs import v1_api, health -app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default="")) +loglevel = config("LOGLEVEL", default=logging.INFO) +print(f">Loglevel set to: {loglevel}") +logging.basicConfig(level=loglevel) +ap_logger = logging.getLogger('apscheduler') +ap_logger.setLevel(loglevel) + + +@asynccontextmanager +async 
def lifespan(app: FastAPI): + # Startup + logging.info(">>>>> starting up <<<<<") + app.schedule = AsyncIOScheduler() + app.queue_system = queue.Queue() + await pg_client.init() + await events_queue.init() + app.schedule.start() + + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) + + ap_logger.info(">Scheduled jobs:") + for job in app.schedule.get_jobs(): + ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) + + # App listening + yield + + # Shutdown + logging.info(">>>>> shutting down <<<<<") + app.schedule.shutdown(wait=True) + await traces.process_traces_queue() + await events_queue.terminate() + await pg_client.terminate() + + +app = FastAPI(root_path="/api", docs_url=config("docs_url", default=""), redoc_url=config("redoc_url", default=""), + lifespan=lifespan) app.add_middleware(GZipMiddleware, minimum_size=1000) @@ -71,43 +108,3 @@ app.include_router(v1_api_ee.app_apikey) app.include_router(health.public_app) app.include_router(health.app) app.include_router(health.app_apikey) - -loglevel = config("LOGLEVEL", default=logging.INFO) -print(f">Loglevel set to: {loglevel}") -logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) -app.schedule = AsyncIOScheduler() -app.queue_system = queue.Queue() - - -@app.on_event("startup") -async def startup(): - logging.info(">>>>> starting up <<<<<") - await pg_client.init() - await events_queue.init() - app.schedule.start() - - for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs: - app.schedule.add_job(id=job["func"].__name__, **job) - - ap_logger.info(">Scheduled jobs:") - for job in app.schedule.get_jobs(): - ap_logger.info({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) - - 
-@app.on_event("shutdown") -async def shutdown(): - logging.info(">>>>> shutting down <<<<<") - app.schedule.shutdown(wait=True) - await traces.process_traces_queue() - await events_queue.terminate() - await pg_client.terminate() - - -@app.get('/private/shutdown', tags=["private"]) -async def stop_server(): - logging.info("Requested shutdown") - await shutdown() - import os, signal - os.kill(1, signal.SIGTERM) diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index a160cf9c2..c47a80ee8 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -86,3 +86,5 @@ rm -rf ./chalicelib/core/performance_event.py rm -rf ./chalicelib/core/saved_search.py rm -rf ./app_alerts.py rm -rf ./build_alerts.sh +rm -rf ./run-dev.sh +rm -rf ./run-alerts-dev.sh diff --git a/ee/api/entrypoint.sh b/ee/api/entrypoint.sh index ebd646a7d..e63d4e2af 100755 --- a/ee/api/entrypoint.sh +++ b/ee/api/entrypoint.sh @@ -2,4 +2,4 @@ sh env_vars.sh source /tmp/.env.override -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload --proxy-headers +uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --proxy-headers diff --git a/ee/api/entrypoint_alerts.sh b/ee/api/entrypoint_alerts.sh index acf8b390a..410015142 100755 --- a/ee/api/entrypoint_alerts.sh +++ b/ee/api/entrypoint_alerts.sh @@ -2,4 +2,4 @@ export ASSIST_KEY=ignore sh env_vars.sh source /tmp/.env.override -uvicorn app:app --host 0.0.0.0 --port $LISTEN_PORT --reload +uvicorn app:app --host 0.0.0.0 --port 8888 diff --git a/ee/api/run-dev.sh b/ee/api/run-dev.sh deleted file mode 100755 index 76682286d..000000000 --- a/ee/api/run-dev.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/zsh - -uvicorn app:app --reload \ No newline at end of file From f7fbefba70220f4ec9a7634535591b2f9b3f3ca4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 16:33:01 +0100 Subject: [PATCH 25/60] feat(chalice): cleaned code --- ee/api/app.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ee/api/app.py b/ee/api/app.py index 
9104a2db4..034d93565 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -24,14 +24,15 @@ from routers.subs import v1_api, health loglevel = config("LOGLEVEL", default=logging.INFO) print(f">Loglevel set to: {loglevel}") logging.basicConfig(level=loglevel) -ap_logger = logging.getLogger('apscheduler') -ap_logger.setLevel(loglevel) @asynccontextmanager async def lifespan(app: FastAPI): # Startup logging.info(">>>>> starting up <<<<<") + ap_logger = logging.getLogger('apscheduler') + ap_logger.setLevel(loglevel) + app.schedule = AsyncIOScheduler() app.queue_system = queue.Queue() await pg_client.init() From a0ba2feea2abfa4de2d780e8831f59c56f1ea1c4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 14 Mar 2023 18:11:02 +0100 Subject: [PATCH 26/60] feat(chalice): fixed health-check --- api/chalicelib/core/health.py | 6 +----- ee/api/chalicelib/core/health.py | 10 +++------- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index 6f236a0f5..f4e2abc6c 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -142,10 +142,6 @@ def __check_redis(): } -def __check_assist(): - pass - - def get_health(): health_map = { "databases": { @@ -157,7 +153,7 @@ def get_health(): "backendServices": { "alerts": __check_be_service("alerts"), "assets": __check_be_service("assets"), - "assist": __check_assist, + "assist": __check_be_service("assist"), "chalice": __always_healthy_with_version, "db": __check_be_service("db"), "ender": __check_be_service("ender"), diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 514425ddf..80abdacd7 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -30,7 +30,7 @@ if config("LOCAL_DEV", cast=bool, default=False): else: HEALTH_ENDPOINTS = { - "alerts": "http://alerts-openreplay.app.svc.cluster.local:8888/metrics", + "alerts": 
"http://alerts-openreplay.app.svc.cluster.local:8888/health", "assets": "http://assets-openreplay.app.svc.cluster.local:8888/metrics", "assist": "http://assist-openreplay.app.svc.cluster.local:8888/health", "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", @@ -45,7 +45,7 @@ else: "quickwit": "http://quickwit-openreplay.app.svc.cluster.local:8888/metrics", "sink": "http://sink-openreplay.app.svc.cluster.local:8888/metrics", "sourcemapreader": "http://sourcemapreader-openreplay.app.svc.cluster.local:8888/health", - "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics" + "storage": "http://storage-openreplay.app.svc.cluster.local:8888/metrics", } @@ -144,10 +144,6 @@ def __check_redis(): } -def __check_assist(): - pass - - def get_health(): health_map = { "databases": { @@ -161,7 +157,7 @@ def get_health(): "backendServices": { "alerts": __check_be_service("alerts"), "assets": __check_be_service("assets"), - "assist": __check_assist, + "assist": __check_be_service("assist"), "chalice": __always_healthy_with_version, "db": __check_be_service("db"), "ender": __check_be_service("ender"), From ac617f36eec24590ac2fd22109205d2d6f4d4b2b Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Mar 2023 12:29:54 +0100 Subject: [PATCH 27/60] feat(DB): support new issue type --- ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 22d2e804e..cb40061d5 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -79,7 +79,7 @@ CREATE TABLE IF NOT EXISTS experimental.events success Nullable(UInt8), request_body Nullable(String), response_body Nullable(String), - issue_type 
Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19)), + issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)), issue_id Nullable(String), error_tags_keys Array(String), error_tags_values Array(Nullable(String)), From 2fd52f46282a9f8eb3e324688c40bc28fe14cf6d Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Mar 2023 12:33:59 +0100 Subject: [PATCH 28/60] feat(DB): support new issue type --- ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql index 5e9c11242..2ae951a50 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql @@ -1 +1,4 @@ -CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; \ No newline at end of file +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; + +ALTER TABLE experimental.events + MODIFY COLUMN issue_type 
Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)); From c9c50d6650b68f16747f1b4351e1af4ed973b600 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Wed, 15 Mar 2023 18:06:30 +0100 Subject: [PATCH 29/60] feat(DB): support new issue type --- .../schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql | 4 ++++ .../schema/db/init_dbs/clickhouse/create/init_schema.sql | 2 +- .../schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql | 9 +++++---- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql index 2ae951a50..1962fde10 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.11.0/1.11.0.sql @@ -2,3 +2,7 @@ CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.11.0-ee'; ALTER TABLE experimental.events MODIFY COLUMN issue_type Nullable(Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20)); + +ALTER TABLE experimental.issues + MODIFY COLUMN type 
Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20); + diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index cb40061d5..9536307d8 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -201,7 +201,7 @@ CREATE TABLE IF NOT EXISTS experimental.issues ( project_id UInt16, issue_id String, - type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19), + type Enum8('click_rage'=1,'dead_click'=2,'excessive_scrolling'=3,'bad_request'=4,'missing_resource'=5,'memory'=6,'cpu'=7,'slow_resource'=8,'slow_page_load'=9,'crash'=10,'ml_cpu'=11,'ml_memory'=12,'ml_dead_click'=13,'ml_click_rage'=14,'ml_mouse_thrashing'=15,'ml_excessive_scrolling'=16,'ml_slow_resources'=17,'custom'=18,'js_exception'=19,'mouse_thrashing'=20), context_string String, context_keys Array(String), context_values Array(Nullable(String)), diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql index 30bb27997..cf80dcad1 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -5,14 +5,10 @@ $$ SELECT 'v1.11.0-ee' $$ LANGUAGE sql IMMUTABLE; - - ALTER TABLE events.inputs ADD COLUMN duration 
integer NULL, ADD COLUMN hesitation integer NULL; - - ALTER TABLE public.projects ALTER COLUMN gdpr SET DEFAULT '{ "maskEmails": true, @@ -21,4 +17,9 @@ ALTER TABLE public.projects "defaultInputMode": "obscured" }'::jsonb; +ALTER TYPE issue_type ADD VALUE IF NOT EXISTS 'mouse_thrashing'; + +ALTER TABLE events.clicks + ADD COLUMN hesitation integer NULL; + COMMIT; \ No newline at end of file From 9774c823c3c8203c31c9bee1a07a70081f22ce6a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Mar 2023 17:40:54 +0100 Subject: [PATCH 30/60] feat(chalice): changed assist-credentials response --- ee/api/chalicelib/utils/assist_helper.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py index d182226c0..061b329ef 100644 --- a/ee/api/chalicelib/utils/assist_helper.py +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -37,13 +37,16 @@ def get_full_config(): if __get_secret() is not None: for i in range(len(servers)): url = servers[i].split(",")[0] - servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials} + # servers[i] = {"url": url} if url.lower().startswith("stun") else {"url": url, **credentials} + servers[i] = {"urls": url} if url.lower().startswith("stun") else {"urls": url, **credentials} else: for i in range(len(servers)): s = servers[i].split(",") if len(s) == 3: - servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + # servers[i] = {"url": s[0], "username": s[1], "credential": s[2]} + servers[i] = {"urls": s[0], "username": s[1], "credential": s[2]} else: - servers[i] = {"url": s[0]} + # servers[i] = {"url": s[0]} + servers[i] = {"urls": s[0]} return servers From de7257b6c44088a00d61bf7359f796f759c7ac30 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 17 Mar 2023 17:54:24 +0100 Subject: [PATCH 31/60] feat(chalice): ignore kafka health-check --- ee/api/chalicelib/core/health.py | 4 ++-- 
ee/api/requirements.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index 80abdacd7..e00747288 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -2,7 +2,7 @@ from urllib.parse import urlparse import redis import requests -from confluent_kafka.admin import AdminClient +# from confluent_kafka.admin import AdminClient from decouple import config from chalicelib.utils import pg_client, ch_client @@ -152,7 +152,7 @@ def get_health(): }, "ingestionPipeline": { "redis": __check_redis, - "kafka": __check_kafka + # "kafka": __check_kafka }, "backendServices": { "alerts": __check_be_service("alerts"), diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index cad05e873..0ba6659c0 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -19,4 +19,4 @@ python3-saml==1.15.0 python-multipart==0.0.5 redis==4.5.1 -confluent-kafka==2.0.2 \ No newline at end of file +#confluent-kafka==2.0.2 \ No newline at end of file From e69bcbeff8482a62d033c798a923a4c3a47e92cb Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Mar 2023 10:25:33 +0100 Subject: [PATCH 32/60] feat(chalice): fixed new replay response --- ee/api/chalicelib/core/sessions_replay.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py index 993855637..319eb13b6 100644 --- a/ee/api/chalicelib/core/sessions_replay.py +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -133,7 +133,8 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat else: data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id) data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id) - data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id) + data['devtoolsURL'] = 
sessions_devtool.get_urls(session_id=session_id, project_id=project_id, + context=context) data['metadata'] = __group_metadata(project_metadata=data.pop("projectMetadata"), session=data) data['live'] = live and assist.is_live(project_id=project_id, session_id=session_id, From 13986a3c55b6423256a3746036945ef2bbe52c22 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Mar 2023 13:39:50 +0100 Subject: [PATCH 33/60] feat(DB): stop upgrade on version hopping --- .../db/init_dbs/postgresql/1.11.0/1.11.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.11.0/1.11.0.sql | 17 +++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql index cf80dcad1..21544f62c 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.10.0-ee'; + next_version CONSTANT text := 'v1.11.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql index 41521a886..0fde93c48 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.11.0/1.11.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.10.0'; + next_version CONSTANT text := 'v1.11.0'; + BEGIN + IF (SELECT 
openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS From e4a3062423ed3aabfe7d8cb3ee0df84fa5e5ca6f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Mar 2023 13:31:35 +0100 Subject: [PATCH 34/60] feat(DB): stop upgrade on version hopping --- .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.0/1.5.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.1/1.5.1.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.2/1.5.2.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.3/1.5.3.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.4/1.5.4.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.6.0/1.6.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.8.0/1.8.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.8.1/1.8.1.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.9.0/1.9.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.0/1.5.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.1/1.5.1.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.2/1.5.2.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.3/1.5.3.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.5.4/1.5.4.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.6.0/1.6.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.7.0/1.7.0.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.8.0/1.8.0.sql | 17 +++++++++++++++++ 
.../db/init_dbs/postgresql/1.8.1/1.8.1.sql | 17 +++++++++++++++++ .../db/init_dbs/postgresql/1.9.0/1.9.0.sql | 17 +++++++++++++++++ 22 files changed, 374 insertions(+) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 044b24176..173114586 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.9.0-ee'; + next_version CONSTANT text := 'v1.10.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql index 03af6067f..a73032e41 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.4.0-ee'; + next_version CONSTANT text := 'v1.5.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS 
diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql index de01efd53..e5f19444c 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.0-ee'; + next_version CONSTANT text := 'v1.5.1-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql index 8ec804b02..675ecb871 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.1-ee'; + next_version CONSTANT text := 'v1.5.2-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql index 0d68e46b9..55494b9c9 100644 --- 
a/ee/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.2-ee'; + next_version CONSTANT text := 'v1.5.3-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql index 1a640b4be..fcb9cd832 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -1,4 +1,21 @@ \set ON_ERROR_STOP true +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.3-ee'; + next_version CONSTANT text := 'v1.5.4-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + SET client_min_messages TO NOTICE; BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql index bb0d7b7c0..1f545b1d6 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql @@ -1,3 +1,20 
@@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.4-ee'; + next_version CONSTANT text := 'v1.6.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql index cdf316fa4..fc62e3767 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.6.0-ee'; + next_version CONSTANT text := 'v1.7.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql index 8347a5c78..aeb87b3ff 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.7.0-ee'; + next_version CONSTANT text := 'v1.8.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise 
notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql index f02f9e0a8..59c8819a7 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.8.0-ee'; + next_version CONSTANT text := 'v1.8.1-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index c3483579d..8e969aeb6 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.8.1-ee'; + next_version CONSTANT text := 'v1.9.0-ee'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 
'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index f2fb3f839..3a7344837 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.9.0'; + next_version CONSTANT text := 'v1.10.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql b/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql index fa72d27da..f0a3203b7 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.5.0/1.5.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.4.0'; + next_version CONSTANT text := 'v1.5.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION 
openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql b/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql index 92fc44afe..c637b072d 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.5.1/1.5.1.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.0'; + next_version CONSTANT text := 'v1.5.1'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql b/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql index f4e26f93a..a94c1c3e9 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.5.2/1.5.2.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.1'; + next_version CONSTANT text := 'v1.5.2'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql b/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql index 159761b74..5b5bb92c4 100644 --- 
a/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.5.3/1.5.3.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.2'; + next_version CONSTANT text := 'v1.5.3'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql b/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql index e7be94997..68aa530d3 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.5.4/1.5.4.sql @@ -1,4 +1,21 @@ \set ON_ERROR_STOP true +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.5.3'; + next_version CONSTANT text := 'v1.5.4'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + SET client_min_messages TO NOTICE; BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() diff --git a/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql b/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql index d11cad5be..cf5c88eab 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.6.0/1.6.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version 
CONSTANT text := 'v1.5.4'; + next_version CONSTANT text := 'v1.6.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql b/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql index edc751da9..88a7acca7 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.7.0/1.7.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.6.0'; + next_version CONSTANT text := 'v1.7.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql b/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql index b14b14f91..93e05f01f 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.8.0/1.8.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.7.0'; + next_version CONSTANT text := 'v1.8.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT 
openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql b/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql index c621da9c7..3b906c9ea 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.8.1/1.8.1.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.8.0'; + next_version CONSTANT text := 'v1.8.1'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS diff --git a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql index 20a8bab1f..1b483432c 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.9.0/1.9.0.sql @@ -1,3 +1,20 @@ +DO +$$ + DECLARE + previous_version CONSTANT text := 'v1.8.1'; + next_version CONSTANT text := 'v1.9.0'; + BEGIN + IF (SELECT openreplay_version()) = previous_version THEN + raise notice 'valid previous DB version'; + ELSEIF (SELECT openreplay_version()) = next_version THEN + raise notice 'new version detected, nothing to do'; + ELSE + RAISE EXCEPTION 'upgrade to % failed, invalid previous version, expected %, got %', 
next_version,previous_version,(SELECT openreplay_version()); + END IF; + END ; +$$ +LANGUAGE plpgsql; + BEGIN; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS From 18d569c12b0032edd61f687cfa1227c32616fdfa Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 20 Mar 2023 14:51:58 +0100 Subject: [PATCH 35/60] feat(chalice): upgraded fastapi --- api/requirements.txt | 2 +- ee/api/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/requirements.txt b/api/requirements.txt index 27b95f17e..490a147df 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.94.1 +fastapi==0.95.0 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 0ba6659c0..b5da59c4b 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -8,7 +8,7 @@ jira==3.4.1 -fastapi==0.94.1 +fastapi==0.95.0 uvicorn[standard]==0.20.0 python-decouple==3.7 pydantic[email]==1.10.4 From f8737b84f732dc14efcefcf4f6d2bd4721f5f912 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Mar 2023 13:16:06 +0100 Subject: [PATCH 36/60] feat(chalice): changed clickhouse-client config --- ee/api/chalicelib/utils/ch_client.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index 576bbc590..ef1839189 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -20,8 +20,9 @@ class ClickHouseClient: def __init__(self): self.__client = clickhouse_driver.Client(host=config("ch_host"), - database=config("ch_database",default="default", cast=str), - password=config("ch_password",default="", cast=str), + database=config("ch_database", default="default"), + user=config("ch_user", default="default"), + password=config("ch_password", default=""), port=config("ch_port", cast=int), settings=settings) 
\ if self.__client is None else self.__client From 788a70fc62a7f1c9b7505daf17eebafaea6f5e98 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Mar 2023 16:33:13 +0100 Subject: [PATCH 37/60] feat(peers): upgraded dependencies --- peers/package-lock.json | 8 ++++---- peers/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/peers/package-lock.json b/peers/package-lock.json index b18dca820..fd230847f 100644 --- a/peers/package-lock.json +++ b/peers/package-lock.json @@ -10,7 +10,7 @@ "license": "Elastic License 2.0 (ELv2)", "dependencies": { "express": "^4.18.2", - "peer": "^v1.0.0-rc.9" + "peer": "^v1.0.0" } }, "node_modules/@types/body-parser": { @@ -57,9 +57,9 @@ "integrity": "sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==" }, "node_modules/@types/node": { - "version": "18.15.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.1.tgz", - "integrity": "sha512-U2TWca8AeHSmbpi314QBESRk7oPjSZjDsR+c+H4ECC1l+kFgpZf8Ydhv3SJpPy51VyZHHqxlb6mTTqYNNRVAIw==" + "version": "18.15.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.5.tgz", + "integrity": "sha512-Ark2WDjjZO7GmvsyFFf81MXuGTA/d6oP38anyxWOL6EREyBKAxKoFHwBhaZxCfLRLpO8JgVXwqOwSwa7jRcjew==" }, "node_modules/@types/qs": { "version": "6.9.7", diff --git a/peers/package.json b/peers/package.json index 82fd0ddf6..d77cf5910 100644 --- a/peers/package.json +++ b/peers/package.json @@ -19,6 +19,6 @@ "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { "express": "^4.18.2", - "peer": "^v1.0.0-rc.9" + "peer": "^v1.0.0" } } From d86b0939f7f808a3f053637cb64e6fe1cab75384 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Tue, 21 Mar 2023 16:40:45 +0100 Subject: [PATCH 38/60] feat(peers): changed build script --- peers/build.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/peers/build.sh b/peers/build.sh index 746a12f9d..0f01a292f 100644 --- a/peers/build.sh +++ 
b/peers/build.sh @@ -23,6 +23,7 @@ function build_api(){ cp -R ../peers ../${destination} cd ../${destination} cp -R ../assist/utils . + cp ../sourcemap-reader/utils/health.js ./utils/. # Copy enterprise code [[ $1 == "ee" ]] && { cp -rf ../ee/peers/* ./ From 03b92e891058eaf32b0932531c5057e22d1c47f6 Mon Sep 17 00:00:00 2001 From: Rajesh Rajendran Date: Thu, 23 Mar 2023 18:20:49 +0100 Subject: [PATCH 39/60] CLI improvements (#1059) * chore(cli): Adding option to install OpenReplay Signed-off-by: rjshrjndrn * chore(cli): Don't install if existing OR found Signed-off-by: rjshrjndrn * chore(cli): Adding data cleanup option Signed-off-by: rjshrjndrn * chore(cli): Cleanup data minio Signed-off-by: rjshrjndrn * chore(cli): Adding info for cleanup Signed-off-by: rjshrjndrn * chore(cli): remove unnecessary logs. Signed-off-by: rjshrjndrn * chore(cli): Adding confirmation message Signed-off-by: rjshrjndrn * chore(cli): Clenaup comments Signed-off-by: rjshrjndrn --------- Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay-cli | 91 +++++++++++++++++++++++++++++-- 1 file changed, 86 insertions(+), 5 deletions(-) diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index 52a6b28b2..4cde9a54d 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -15,6 +15,9 @@ tmp_dir=$(mktemp -d) sudo mkdir $OR_DIR } export PATH=/var/lib/openreplay:$PATH +function xargs() { + /var/lib/openreplay/busybox xargs +} tools=( zyedidia/eget @@ -114,10 +117,12 @@ echo -e ${NC} log info ' Usage: openreplay [ -h | --help ] [ -s | --status ] + [ -i | --install DOMAIN_NAME ] [ -u | --upgrade ] [ -U | --deprecated-upgrade /path/to/old_vars.yaml] [ -r | --restart ] [ -R | --Reload ] + [ -c | --cleanup N(in days) ] [ -e | --edit ] [ -p | --install-packages ] [ -l | --logs SERVICE ] @@ -184,6 +189,73 @@ function upgrade_old() { upgrade } +function clone_repo() { + err_cd "$tmp_dir" + log info "Working directory $tmp_dir" + 
git_options="-b ${OR_VERSION:-main}" + eval git clone "${OR_REPO}" --depth 1 $git_options + return +} + +function install() { + domain_name=$1 + # Check existing installation + [[ -f ${OR_DIR}/vars.yaml ]] && { + or_version=$(busybox awk '/fromVersion/{print $2}' < "${OR_DIR}/vars.yaml") + log err "Openreplay installation ${BWHITE}${or_version}${RED} found. If you want to upgrade, run ${BWHITE}openreplay -u${RED}" + } + # Installing OR + log title "Installing OpenReplay" + clone_repo + err_cd "$tmp_dir/openreplay/scripts/helmcharts" + DOMAIN_NAME=$domain_name bash init.sh + return +} + +function cleanup() { + # Confirmation for deletion. Do you want to delete Postgres/Minio(session) data before $date ? + delete_from_number_days=$1 + delete_from_date=$(date +%Y-%m-%d -d "$delete_from_number_days day ago") + log debug "Do you want to delete the data captured on and before ${BWHITE}$delete_from_date${YELLOW}?" + read -p "Are you sure[y/n]? " -n 1 -r + echo # (optional) move to a new line + if [[ ! 
$REPLY =~ ^[Yy]$ ]]; then + log err "Cancelling data deletion" + fi + + # Run pg cleanup + pguser=$(awk '/postgresqlUser/{print $2}' < "${OR_DIR}/vars.yaml" | xargs) + pgpassword=$(awk '/postgresqlPassword/{print $2}' < "${OR_DIR}/vars.yaml" | xargs) + pghost=$(awk '/postgresqlHost/{print $2}' < "${OR_DIR}/vars.yaml" | xargs) + pgport=$(awk '/postgresqlPort/{print $2}' < "${OR_DIR}/vars.yaml" | xargs) + pgdatabase=$(awk '/postgresqlDatabase/{print $2}' < "${OR_DIR}/vars.yaml" | xargs) + kubectl delete po -n ${APP_NS} pg-cleanup &> /dev/null || true + kubectl run pg-cleanup -n ${APP_NS} \ + --restart=Never \ + --env PGHOST=$pghost \ + --env PGUSER=$pguser \ + --env PGDATABASE=$pgdatabase \ + --env PGPASSWORD=$pgpassword \ + --env PGPORT=$pgport \ + --image bitnami/postgresql -- psql -c "DELETE FROM public.sessions WHERE start_ts < extract(epoch from '${delete_from_date}'::date) * 1000;" + # Run minio cleanup + MINIO_ACCESS_KEY=$(awk '/accessKey/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs) + MINIO_SECRET_KEY=$(awk '/secretKey/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs) + MINIO_HOST=$(awk '/endpoint/{print $NF}' < "${OR_DIR}/vars.yaml" | tail -n1 | xargs) + kubectl delete po -n ${APP_NS} minio-cleanup &> /dev/null || true + kubectl run minio-cleanup -n ${APP_NS} \ + --restart=Never \ + --env MINIO_HOST=$pghost \ + --image bitnami/minio:2020.10.9-debian-10-r6 -- /bin/sh -c " + mc alias set minio $MINIO_HOST $MINIO_ACCESS_KEY $MINIO_SECRET_KEY && + mc rm --recursive --dangerous --force --older-than ${delete_from_number_days}d minio/mobs + " + log info "Postgres data cleanup process initiated. Postgres will automatically vacuum deleted rows when the database is idle. This may take up a few days to free the disk space." + log info "Minio (where recordings are stored) cleanup process initiated." + log info "Run ${BWHITE}openreplay -s${GREEN} to check the status of the cleanup process and available disk space." 
+ return +} + function upgrade() { # TODO: # 1. store vars.yaml in central place. @@ -191,15 +263,12 @@ function upgrade() { # 3. How to update package. Because openreplay -u will be done from old update script # 4. Update from Version exists git || log err "Git not found. Please install" - log info "Working directory $tmp_dir" - err_cd "$tmp_dir" or_version=$(busybox awk '/fromVersion/{print $2}' < "${OR_DIR}/vars.yaml") # Creating backup dir of current installation [[ -d "$OR_DIR/openreplay" ]] && sudo cp -rfb "$OR_DIR/openreplay" "$OR_DIR/openreplay_${or_version//\"}" && sudo rm -rf ${OR_DIR}/openreplay - git_options="-b ${OR_VERSION:-main}" - eval git clone "${OR_REPO}" --depth 1 $git_options + clone_repo err_cd openreplay/scripts/helmcharts install_packages [[ -d /openreplay ]] && sudo chown -R 1001:1001 /openreplay @@ -239,7 +308,7 @@ function clean_tmp_dir() { install_packages } -PARSED_ARGUMENTS=$(busybox getopt -a -n openreplay -o Rrevpiuhsl:U: --long reload,edit,restart,verbose,install-packages,install,upgrade,help,status,logs,deprecated-upgrade: -- "$@") +PARSED_ARGUMENTS=$(busybox getopt -a -n openreplay -o Rrevpi:uhsl:U:c: --long reload,edit,restart,verbose,install-packages,install:,upgrade,help,status,logs,deprecated-upgrade:,cleanup: -- "$@") VALID_ARGUMENTS=$? 
if [[ "$VALID_ARGUMENTS" != "0" ]]; then help @@ -256,6 +325,12 @@ do clean_tmp_dir exit 0 ;; + -i | --install) + log title "Installing OpenReplay" + install "$2" + clean_tmp_dir + exit 0 + ;; -u | --upgrade) log title "Upgrading OpenReplay" upgrade @@ -268,6 +343,12 @@ do clean_tmp_dir exit 0 ;; + -c | --cleanup) + log title "Cleaning up data older than $2 days" + cleanup "$2" + clean_tmp_dir + exit 0 + ;; -r | --restart) log title "Restarting OpenReplay Components" kubectl rollout restart deployment -n "${APP_NS}" From 09e788a29fe7440bfd7a1d1ebfd3c000b03d7649 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 24 Mar 2023 10:13:52 +0100 Subject: [PATCH 40/60] change(ui): testing more optimisations --- .../app/player/web/managers/DOM/DOMManager.ts | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index ec51401f6..ab839118b 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -257,9 +257,13 @@ export default class DOMManager extends ListWalker { } return case MType.RemoveNodeAttribute: - vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } - vn.removeAttribute(msg.name) + if (isJump) { + this.attrsBacktrack = this.attrsBacktrack.filter(m => m.id !== msg.id && m.name !== msg.name) + } else { + vn = this.vElements.get(msg.id) + if (!vn) { logger.error("Node not found", msg); return } + vn.removeAttribute(msg.name) + } return case MType.SetInputValue: vn = this.vElements.get(msg.id) @@ -472,7 +476,25 @@ export default class DOMManager extends ListWalker { * are applied, so it won't try to download and then cancel when node is created in msg N and removed in msg N+2 * which produces weird bug when asset is cached (10-25ms delay) * */ - await this.moveWait(t, (msg) => this.applyMessage(msg, isJump)) + // 
http://0.0.0.0:3333/5/session/8452905874437457 + // 70 iframe, 8 create element - STYLE tag + console.time('moveWait') + let t0 = performance.now() + let t1 = t0 + const timings = [] + await this.moveWait(t, (msg) => { + t0 = performance.now() + this.applyMessage(msg, isJump) + t1 = performance.now() + timings.push({ t: t1 - t0, m: msg.tp, msg }) + }) + + console.timeEnd('moveWait') + console.log( + timings.sort((a, b) => b.t - a.t), + timings.filter(t => t.msg.tag === 'STYLE').length, + ) + if (isJump) { this.attrsBacktrack.forEach(msg => { this.applyBacktrack(msg) @@ -480,7 +502,6 @@ export default class DOMManager extends ListWalker { this.attrsBacktrack = [] } this.vRoots.forEach(rt => rt.applyChanges()) // MBTODO (optimisation): affected set - // Thinkabout (read): css preload // What if we go back before it is ready? We'll have two handlres? return this.stylesManager.moveReady(t).then(() => { From b2ce9a7aff8a9eb7035eeba2a4435b35fc39e6b2 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 17 Feb 2023 12:30:20 +0100 Subject: [PATCH 41/60] feat(ui): health status widget --- frontend/app/components/Header/Header.js | 3 + .../HealthStatus/HealthModal/Footer.tsx | 38 ++++ .../HealthStatus/HealthModal/HealthModal.tsx | 66 ++++++ .../Header/HealthStatus/HealthStatus.tsx | 92 +++++++++ .../components/Header/HealthStatus/index.ts | 1 + .../BugReport/components/MetaInfo.tsx | 4 +- frontend/app/components/ui/SVG.tsx | 4 +- frontend/app/svg/cheers.svg | 193 ++++++++++++++++++ .../app/svg/icons/exclamation-circle-fill.svg | 10 + frontend/app/svg/icons/pulse.svg | 3 + frontend/app/theme/colors.js | 1 + 11 files changed, 411 insertions(+), 4 deletions(-) create mode 100644 frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx create mode 100644 frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx create mode 100644 frontend/app/components/Header/HealthStatus/HealthStatus.tsx create mode 100644 
frontend/app/components/Header/HealthStatus/index.ts create mode 100644 frontend/app/svg/cheers.svg create mode 100644 frontend/app/svg/icons/exclamation-circle-fill.svg create mode 100644 frontend/app/svg/icons/pulse.svg diff --git a/frontend/app/components/Header/Header.js b/frontend/app/components/Header/Header.js index 7ef0028c9..021f96df3 100644 --- a/frontend/app/components/Header/Header.js +++ b/frontend/app/components/Header/Header.js @@ -19,6 +19,7 @@ import UserMenu from './UserMenu'; import SettingsMenu from './SettingsMenu'; import DefaultMenuView from './DefaultMenuView'; import PreferencesView from './PreferencesView'; +import HealthStatus from './HealthStatus' const CLIENT_PATH = client(CLIENT_DEFAULT_TAB); @@ -78,6 +79,8 @@ const Header = (props) => { + +
diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx new file mode 100644 index 000000000..43bd434fc --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx @@ -0,0 +1,38 @@ +import React from 'react'; +import { Icon } from 'UI'; + +function Footer() { + return ( + + ); +} + +export default Footer; diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx new file mode 100644 index 000000000..86b597ed7 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -0,0 +1,66 @@ +import React from 'react'; +import slide from 'App/svg/cheers.svg'; +import { Icon, Button } from 'UI'; +import Footer from './Footer' + +function Category({ name, healthOk }: { name: string; healthOk: boolean }) { + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + return ( +
+ + {name} +
+ ) +} + +function HealthModal({ healthOk }: { healthOk: boolean }) { + + return ( +
+
+
Installation Status
+ +
+ +
+
+ + + + +
+
+ +
+
+
+ +
+
+
+ ); +} + +export default HealthModal; diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx new file mode 100644 index 000000000..24e4c281e --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -0,0 +1,92 @@ +import React from 'react'; +import { Icon, Tooltip } from 'UI'; +import cn from 'classnames'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; + +function HealthStatus() { + const [healthOk, setHealth] = React.useState(false); + + const icon = healthOk ? 'pulse' : ('exclamation-circle-fill' as const); + return ( +
+
+
+ +
+
+ + + +
+ ); +} + +function HealthMenu({ healthOk, setHealth }: { healthOk: boolean; setHealth: any }) { + const title = healthOk ? 'All Systems Operational' : 'Service disruption'; + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + return ( +
+
+
+ + {title} +
+
+ Last checked 22 mins. ago +
setHealth(!healthOk)}> + +
+
+
+ +
+
+
Version
+
+ 123 123 +
+
+ + {healthOk ? ( + <> +
+
Sessions
+
+ 10 000 +
+
+
+
Events
+
+ 90 000 +
+
+ + ) : ( +
Observed installation Issue with the following
+ )} +
+
+
+ ); +} + +export default HealthStatus; diff --git a/frontend/app/components/Header/HealthStatus/index.ts b/frontend/app/components/Header/HealthStatus/index.ts new file mode 100644 index 000000000..1f4ce8576 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/index.ts @@ -0,0 +1 @@ +export { default } from './HealthStatus' \ No newline at end of file diff --git a/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx b/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx index 09746dcfb..2eecf4ea7 100644 --- a/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx +++ b/frontend/app/components/Session_/BugReport/components/MetaInfo.tsx @@ -24,11 +24,9 @@ export default function MetaInfo({ {Object.keys(envObject).map((envTag) => (
{envTag}
-
- +
{/* @ts-ignore */} {envObject[envTag]} -
))} diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index eeaeae5dd..95254b16c 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-counterclockwise' | 'arrow-down-short' | 'arrow-down' | 'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 
'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'folder-plus' | 'folder2' | 
'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 
'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-counterclockwise' | 'arrow-down-short' | 'arrow-down' | 
'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' 
| 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle-fill' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 
'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' 
| 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'pulse' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -170,6 +170,7 @@ const SVG = (props: Props) => { case 'event/mouse_thrashing': return ; case 'event/resize': return ; case 'event/view': return ; + case 'exclamation-circle-fill': return ; case 'exclamation-circle': return ; case 'expand-wide': return ; case 'explosion': return ; @@ -368,6 +369,7 @@ const SVG = (props: Props) => { case 'plus': return ; case 'pointer-sessions-search': return ; case 'prev1': return ; + case 'pulse': return ; case 'puzzle-piece': return ; case 'puzzle': return ; case 'question-circle': return ; diff --git a/frontend/app/svg/cheers.svg b/frontend/app/svg/cheers.svg new file mode 100644 index 000000000..1341b27a2 
--- /dev/null +++ b/frontend/app/svg/cheers.svg @@ -0,0 +1,193 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/app/svg/icons/exclamation-circle-fill.svg b/frontend/app/svg/icons/exclamation-circle-fill.svg new file mode 100644 index 000000000..eebbd6833 --- /dev/null +++ b/frontend/app/svg/icons/exclamation-circle-fill.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/frontend/app/svg/icons/pulse.svg b/frontend/app/svg/icons/pulse.svg new file mode 100644 index 000000000..5075d1cab --- /dev/null +++ b/frontend/app/svg/icons/pulse.svg @@ -0,0 +1,3 @@ + + + diff --git a/frontend/app/theme/colors.js b/frontend/app/theme/colors.js index 3986cf3d5..d9671f6fe 100644 --- a/frontend/app/theme/colors.js +++ b/frontend/app/theme/colors.js @@ -57,5 +57,6 @@ module.exports = { 'text-disabled': 'rgba(0,0,0, 0.38)', 'text-primary': 'rgba(0,0,0, 0.87)', 'outlined-border': 'rgba(0,0,0, 0.23)', + 'divider': 'rgba(0, 0, 0, 0.12)', } } From 0f1232f3a78d575b373b7eeb0ff72f803036dc1f Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 17 Feb 2023 13:02:39 +0100 Subject: [PATCH 42/60] feat(ui): change hovers --- .../Header/HealthStatus/HealthModal/HealthModal.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index 86b597ed7..5cce8dc9b 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -7,10 +7,12 @@ function Category({ name, healthOk }: { name: 
string; healthOk: boolean }) { const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); return (
{name} + +
) } From f3efa296df3cdacf503533caff1d6021c4703864 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 17 Feb 2023 13:03:34 +0100 Subject: [PATCH 43/60] feat(ui): remove warnings --- .../components/Header/HealthStatus/HealthModal/HealthModal.tsx | 3 ++- frontend/app/components/Header/HealthStatus/HealthStatus.tsx | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index 5cce8dc9b..85cfd2dcf 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -1,4 +1,5 @@ import React from 'react'; +// @ts-ignore import slide from 'App/svg/cheers.svg'; import { Icon, Button } from 'UI'; import Footer from './Footer' @@ -17,7 +18,7 @@ function Category({ name, healthOk }: { name: string; healthOk: boolean }) { ) } -function HealthModal({ healthOk }: { healthOk: boolean }) { +function HealthModal() { return (
Date: Tue, 21 Feb 2023 15:08:41 +0100 Subject: [PATCH 44/60] change(ui): mock back response --- .../Header/HealthStatus/HealthModal/mock.ts | 187 ++++++++++++++++++ .../Header/HealthStatus/HealthStatus.tsx | 34 ++++ 2 files changed, 221 insertions(+) create mode 100644 frontend/app/components/Header/HealthStatus/HealthModal/mock.ts diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/mock.ts b/frontend/app/components/Header/HealthStatus/HealthModal/mock.ts new file mode 100644 index 000000000..8211126f6 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthModal/mock.ts @@ -0,0 +1,187 @@ +export const response = { + databases: { + postgres: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + clickhouse: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + }, + ingestionPipeline: { + redis: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + kafka: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + }, + backendServices: { + alerts: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + assets: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + assist: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + chalice: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + db: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + ender: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + frontend: { + health: true, + details: { + version: 'v1.13', + schema: 
'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + heuristics: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + http: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + 'ingress-nginx': { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + integrations: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + peers: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + quickwit: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + sink: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + sourcemapreader: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + storage: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + utilities: { + health: true, + details: { + version: 'v1.13', + schema: 'v1.10', + lastUpdatedOn: '12jan2023', + }, + }, + }, + overall: { + health: true, + details: { + numberOfEventCaptured: 123000, + numberOfSessionsCaptured: 25678, + }, + labels: { + parent: 'information', + }, + }, + ssl: true, +}; diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx index 5296594c2..361e49be3 100644 --- a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -2,6 +2,40 @@ import React from 'react'; import { Icon } from 'UI'; import cn from 'classnames'; import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import { response } from './HealthModal/mock' + 
+function mapResponse(resp) { + const dbKeys = Object.keys(resp.databases) + const ingestKeys = Object.keys(resp.ingestionPipeline) + const backendKeys = Object.keys(resp.backendServices) + + if (!resp.overall.health) { + const dbHealth: Record = { + overall: true, + } + const ingestHealth: Record = { + overall: true, + } + const backHealth: Record = { + overall: true, + } + dbKeys.forEach(key => { + const dbStatus = resp.databases[key].health + if (!dbStatus) dbHealth.overall = false + dbHealth[key] = resp.databases.key + }) + ingestKeys.forEach(key => { + const ingestStatus = resp.ingestionPipeline[key].health + if (!ingestStatus) ingestHealth.overall = false + ingestHealth[key] = resp.ingestionPipeline.key + }) + backendKeys.forEach(key => { + const backendStatus = resp.backendServices[key].health + if (!backendStatus) backHealth.overall = false + backHealth[key] = resp.backendServices.key + }) + } +} function HealthStatus() { const [healthOk, setHealth] = React.useState(false); From 620489e57d17dc9e81a48e2a1ea4507e70578db5 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 13 Mar 2023 16:06:58 +0100 Subject: [PATCH 45/60] change(ui): remove mock, connect api to health status comp --- .../HealthStatus/HealthModal/HealthModal.tsx | 16 +- .../Header/HealthStatus/HealthModal/mock.ts | 187 ------------------ .../Header/HealthStatus/HealthStatus.tsx | 179 +++++++++++------ frontend/app/services/HealthService.ts | 10 + frontend/app/services/index.ts | 4 + 5 files changed, 141 insertions(+), 255 deletions(-) delete mode 100644 frontend/app/components/Header/HealthStatus/HealthModal/mock.ts create mode 100644 frontend/app/services/HealthService.ts diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index 85cfd2dcf..e9aef0e71 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ 
b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -4,11 +4,12 @@ import slide from 'App/svg/cheers.svg'; import { Icon, Button } from 'UI'; import Footer from './Footer' -function Category({ name, healthOk }: { name: string; healthOk: boolean }) { +export function Category({ name, healthOk, onClick }: { name: string; healthOk: boolean; onClick: (args: any) => void }) { const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); return (
{name} @@ -18,7 +19,7 @@ function Category({ name, healthOk }: { name: string; healthOk: boolean }) { ) } -function HealthModal() { +function HealthModal({ getHealth, isLoading, healthResponse }: { getHealth: () => void; isLoading: boolean; healthResponse: Record }) { return (
Installation Status
-
@@ -46,8 +47,8 @@ function HealthModal() {
- - + + {/**/}
= { - overall: true, - } - const ingestHealth: Record = { - overall: true, - } - const backHealth: Record = { - overall: true, - } - dbKeys.forEach(key => { - const dbStatus = resp.databases[key].health - if (!dbStatus) dbHealth.overall = false - dbHealth[key] = resp.databases.key - }) - ingestKeys.forEach(key => { - const ingestStatus = resp.ingestionPipeline[key].health - if (!ingestStatus) ingestHealth.overall = false - ingestHealth[key] = resp.ingestionPipeline.key - }) - backendKeys.forEach(key => { - const backendStatus = resp.backendServices[key].health - if (!backendStatus) backHealth.overall = false - backHealth[key] = resp.backendServices.key - }) - } +const categoryKeyNames = { + backendServices: 'Backend Services', + databases: 'Databases', + ingestionPipeline: 'Ingestion Pipeline', + ssl: 'SSL', } -function HealthStatus() { - const [healthOk, setHealth] = React.useState(false); +function mapResponse(resp: Record) { + const services = Object.keys(resp); + const healthMap: Record = {} + services.forEach(service => { + healthMap[service] = { + // @ts-ignore + name: categoryKeyNames[service], + healthOk: true, + subservices: resp[service], + } + Object.values(healthMap[service].subservices).forEach((subservice: Record) => { + if (!subservice?.health) healthMap[service].healthOk = false; + }) + }) - const icon = healthOk ? 
'pulse' : ('exclamation-circle-fill' as const); + const overallHealth = Object.values(healthMap).every((service: Record) => service.healthOk); + + return { overallHealth, healthMap } +} + + +function HealthStatus() { + const lastAskedKey = '__openreplay_health_status'; + const healthResponseKey = '__openreplay_health_response'; + const healthResponseSaved = localStorage.getItem(healthResponseKey) || '{}'; + const [healthResponse, setHealthResponse] = React.useState(JSON.parse(healthResponseSaved)); + const [isLoading, setIsLoading] = React.useState(false); + const lastAskedSaved = localStorage.getItem(lastAskedKey); + const [lastAsked, setLastAsked] = React.useState(lastAskedSaved); + const [showModal, setShowModal] = React.useState(false); + + const getHealth = async () => { + if (isLoading) return; + try { + setIsLoading(true); + const r = await healthService.fetchStatus(); + const healthMap = mapResponse(r) + setHealthResponse(healthMap); + const asked = new Date().getTime(); + localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)) + localStorage.setItem(lastAskedKey, asked.toString()); + setLastAsked(asked.toString()); + } catch (e) { + console.error(e); + } finally { + setIsLoading(false); + } + }; + + React.useEffect(() => { + const now = new Date(); + const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; + const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; + const diffInMinutes = Math.round(diff / 1000 / 60); + if (Object.keys(healthResponse).length === 0 || !lastAskedDate || diffInMinutes > 10) { + void getHealth(); + } + }, []); + + const icon = healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); return (
- - + + {showModal ? () : null}
); } -function HealthMenu({ healthOk, setHealth }: { healthOk: boolean; setHealth: any }) { +function HealthMenu({ + healthResponse, + getHealth, + isLoading, + lastAsked, + setShowModal, +}: { + healthResponse: Record; + getHealth: Function; + isLoading: boolean; + lastAsked: string | null; + setShowModal: (visible: boolean) => void; +}) { + const [lastAskedDiff, setLastAskedDiff] = React.useState(0); + const healthOk = healthResponse?.overallHealth; + + React.useEffect(() => { + const now = new Date(); + const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; + const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; + const diffInMinutes = Math.round(diff / 1000 / 60); + setLastAskedDiff(diffInMinutes); + }, [lastAsked]); + const title = healthOk ? 'All Systems Operational' : 'Service disruption'; const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + + const problematicServices = Object.values(healthResponse?.healthMap || {}).filter( + (service: Record) => !service.healthOk + ) as Record[]; return (
{title}
- Last checked 22 mins. ago -
setHealth(!healthOk)}> + Last checked {lastAskedDiff} mins. ago +
getHealth()} + >
-
-
Version
-
- 123 123 -
-
+ {/*
*/} + {/*
Version
*/} + {/*
*/} + {/* 123 123*/} + {/*
*/} + {/*
*/} - {healthOk ? ( + {!healthOk ? ( <> -
-
Sessions
-
- 10 000 -
-
-
-
Events
-
- 90 000 -
-
+
Observed installation Issue with the following
+ {problematicServices.map(service => setShowModal(true)} healthOk={false} name={service.name} />)} - ) : ( -
Observed installation Issue with the following
- )} + ) : null}
diff --git a/frontend/app/services/HealthService.ts b/frontend/app/services/HealthService.ts new file mode 100644 index 000000000..019863bb3 --- /dev/null +++ b/frontend/app/services/HealthService.ts @@ -0,0 +1,10 @@ +import BaseService from './BaseService'; + +export default class HealthService extends BaseService { + fetchStatus(): Promise { + return this.client.get('/health') + .then(r => r.json()) + .then(j => j.data || {}) + .catch(Promise.reject) + } +} \ No newline at end of file diff --git a/frontend/app/services/index.ts b/frontend/app/services/index.ts index 816113e68..32e216127 100644 --- a/frontend/app/services/index.ts +++ b/frontend/app/services/index.ts @@ -10,6 +10,7 @@ import RecordingsService from "./RecordingsService"; import ConfigService from './ConfigService' import AlertsService from './AlertsService' import WebhookService from './WebhookService' +import HealthService from "./HealthService"; export const dashboardService = new DashboardService(); export const metricService = new MetricService(); @@ -24,6 +25,8 @@ export const configService = new ConfigService(); export const alertsService = new AlertsService(); export const webhookService = new WebhookService(); +export const healthService = new HealthService(); + export const services = [ dashboardService, metricService, @@ -37,4 +40,5 @@ export const services = [ configService, alertsService, webhookService, + healthService, ] \ No newline at end of file From 12d7ff4f99a1f52ffe864a309df07250d89f5348 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Tue, 14 Mar 2023 12:17:50 +0100 Subject: [PATCH 46/60] change(ui): remove keys warning, create service health type, keep response in localstorage for 15 mins --- .../HealthStatus/HealthModal/HealthModal.tsx | 156 +++++++++++------ .../Header/HealthStatus/HealthStatus.tsx | 159 +++++------------- .../Header/HealthStatus/HealthWidget.tsx | 93 ++++++++++ .../Header/HealthStatus/ServiceCategory.tsx | 43 +++++ 
.../SubserviceHealth/SubserviceHealth.tsx | 52 ++++++ .../components/Header/HealthStatus/const.ts | 6 + 6 files changed, 346 insertions(+), 163 deletions(-) create mode 100644 frontend/app/components/Header/HealthStatus/HealthWidget.tsx create mode 100644 frontend/app/components/Header/HealthStatus/ServiceCategory.tsx create mode 100644 frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx create mode 100644 frontend/app/components/Header/HealthStatus/const.ts diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index e9aef0e71..b0bbb8969 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -1,73 +1,131 @@ import React from 'react'; // @ts-ignore import slide from 'App/svg/cheers.svg'; -import { Icon, Button } from 'UI'; -import Footer from './Footer' +import { Button } from 'UI'; +import Footer from './Footer'; +import { getHighest } from 'App/constants/zindex'; +import Category from 'Components/Header/HealthStatus/ServiceCategory'; +import SubserviceHealth from 'Components/Header/HealthStatus/SubserviceHealth/SubserviceHealth'; +import { IServiceStats } from '../HealthStatus'; -export function Category({ name, healthOk, onClick }: { name: string; healthOk: boolean; onClick: (args: any) => void }) { - const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); - return ( -
- - {name} +function HealthModal({ + getHealth, + isLoading, + healthResponse, + setShowModal, +}: { + getHealth: () => void; + isLoading: boolean; + healthResponse: { overallHealth: boolean; healthMap: Record }; + setShowModal: (isOpen: boolean) => void; +}) { + const [selectedService, setSelectedService] = React.useState(''); - -
- ) -} - -function HealthModal({ getHealth, isLoading, healthResponse }: { getHealth: () => void; isLoading: boolean; healthResponse: Record }) { + React.useEffect(() => { + if (!healthResponse.overallHealth) { + setSelectedService( + Object.keys(healthResponse.healthMap).filter( + (s) => !healthResponse.healthMap[s].healthOk + )[0] + ); + } + }, [healthResponse]); + const handleClose = () => { + setShowModal(false); + }; return (
e.stopPropagation()} + className={'flex flex-col bg-white rounded border border-figmaColors-divider'} > -
Installation Status
- -
- -
-
- - - - {/**/} -
- +
Installation Status
+
+ +
+
+ {Object.keys(healthResponse.healthMap).map((service) => ( + + setSelectedService(service)} + healthOk={healthResponse.healthMap[service].healthOk} + name={healthResponse.healthMap[service].name} + isSelectable + isSelected={selectedService === service} + /> + + ))} +
+
+ {selectedService ? ( + + ) : ( + + )} +
+
+
+ +
+
-
- -
-
); } - - +function ServiceStatus({ service }: { service: Record }) { + const { subservices } = service; + return ( +
+
+ {Object.keys(subservices).map((subservice: string) => ( + + + + ))} +
+
+ ); +} export default HealthModal; diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx index 930419f63..7732bca9d 100644 --- a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -1,38 +1,46 @@ import React from 'react'; import { Icon } from 'UI'; -import cn from 'classnames'; -import HealthModal, { Category } from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; import { healthService } from 'App/services'; +import { categoryKeyNames } from './const'; +import HealthWidget from "Components/Header/HealthStatus/HealthWidget"; - -const categoryKeyNames = { - backendServices: 'Backend Services', - databases: 'Databases', - ingestionPipeline: 'Ingestion Pipeline', - ssl: 'SSL', +export interface IServiceStats { + name: 'backendServices' | 'databases' | 'ingestionPipeline' | 'ssl'; + serviceName: string; + healthOk: boolean; + subservices: { + health: boolean; + details?: { + errors?: string[]; + version?: string; + } + }[] } function mapResponse(resp: Record) { const services = Object.keys(resp); - const healthMap: Record = {} - services.forEach(service => { + const healthMap: Record = {}; + services.forEach((service) => { healthMap[service] = { // @ts-ignore name: categoryKeyNames[service], healthOk: true, subservices: resp[service], - } + serviceName: service, + }; Object.values(healthMap[service].subservices).forEach((subservice: Record) => { if (!subservice?.health) healthMap[service].healthOk = false; - }) - }) + }); + }); - const overallHealth = Object.values(healthMap).every((service: Record) => service.healthOk); + const overallHealth = Object.values(healthMap).every( + (service: Record) => service.healthOk + ); - return { overallHealth, healthMap } + return { overallHealth, healthMap }; } - function 
HealthStatus() { const lastAskedKey = '__openreplay_health_status'; const healthResponseKey = '__openreplay_health_response'; @@ -48,10 +56,10 @@ function HealthStatus() { try { setIsLoading(true); const r = await healthService.fetchStatus(); - const healthMap = mapResponse(r) + const healthMap = mapResponse(r); setHealthResponse(healthMap); const asked = new Date().getTime(); - localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)) + localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)); localStorage.setItem(lastAskedKey, asked.toString()); setLastAsked(asked.toString()); } catch (e) { @@ -73,109 +81,32 @@ function HealthStatus() { const icon = healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); return ( -
-
-
- -
-
- - - {showModal ? () : null} -
- ); -} - -function HealthMenu({ - healthResponse, - getHealth, - isLoading, - lastAsked, - setShowModal, -}: { - healthResponse: Record; - getHealth: Function; - isLoading: boolean; - lastAsked: string | null; - setShowModal: (visible: boolean) => void; -}) { - const [lastAskedDiff, setLastAskedDiff] = React.useState(0); - const healthOk = healthResponse?.overallHealth; - - React.useEffect(() => { - const now = new Date(); - const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; - const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; - const diffInMinutes = Math.round(diff / 1000 / 60); - setLastAskedDiff(diffInMinutes); - }, [lastAsked]); - - const title = healthOk ? 'All Systems Operational' : 'Service disruption'; - const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); - - const problematicServices = Object.values(healthResponse?.healthMap || {}).filter( - (service: Record) => !service.healthOk - ) as Record[]; - return ( -
-
+ <> +
- - {title} -
-
- Last checked {lastAskedDiff} mins. ago -
getHealth()} - > - +
+
-
-
- {/*
*/} - {/*
Version
*/} - {/*
*/} - {/* 123 123*/} - {/*
*/} - {/*
*/} - - {!healthOk ? ( - <> -
Observed installation Issue with the following
- {problematicServices.map(service => setShowModal(true)} healthOk={false} name={service.name} />)} - - ) : null} -
+
-
+ {showModal ? ( + + ) : null} + ); } + export default HealthStatus; diff --git a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx new file mode 100644 index 000000000..c6372540b --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx @@ -0,0 +1,93 @@ +import React from 'react' +import { Icon } from "UI"; +import ServiceCategory from "Components/Header/HealthStatus/ServiceCategory"; +import cn from 'classnames' +import { IServiceStats } from './HealthStatus' + +function HealthWidget({ + healthResponse, + getHealth, + isLoading, + lastAsked, + setShowModal, +}: { + healthResponse: { overallHealth: boolean; healthMap: Record }; + getHealth: Function; + isLoading: boolean; + lastAsked: string | null; + setShowModal: (visible: boolean) => void; +}) { + const [lastAskedDiff, setLastAskedDiff] = React.useState(0); + const healthOk = healthResponse?.overallHealth; + + React.useEffect(() => { + const now = new Date(); + const lastAskedDate = lastAsked ? new Date(parseInt(lastAsked, 10)) : null; + const diff = lastAskedDate ? now.getTime() - lastAskedDate.getTime() : 0; + const diffInMinutes = Math.round(diff / 1000 / 60); + setLastAskedDiff(diffInMinutes); + }, [lastAsked]); + + const title = healthOk ? 'All Systems Operational' : 'Service disruption'; + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + + const problematicServices = Object.values(healthResponse?.healthMap || {}).filter( + (service: Record) => !service.healthOk + ) + + return ( +
+
+
+ + {title} +
+
+ Last checked {lastAskedDiff} mins. ago +
getHealth()} + > + +
+
+
+ +
+ {!healthOk ? ( + <> +
+ Observed installation Issue with the following +
+ {problematicServices.map((service) => ( + + setShowModal(true)} + healthOk={false} + name={service.name} + /> + + ))} + + ) : null} +
+
+
+ ); +} + +export default HealthWidget \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx new file mode 100644 index 000000000..3c9259c39 --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx @@ -0,0 +1,43 @@ +import { Icon } from 'UI'; +import React from 'react'; +import cn from 'classnames'; + +function Category({ + name, + healthOk, + onClick, + isSelectable, + isExpandable, + isExpanded, + isSelected, +}: { + name: string; + healthOk: boolean; + onClick: (args: any) => void; + isSelectable?: boolean; + isExpandable?: boolean; + isExpanded?: boolean; + isSelected?: boolean; +}) { + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + return ( +
+ + {name} + + {isSelectable ? : null} + {isExpandable ? ( + + ) : null} +
+ ); +} + +export default Category \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx new file mode 100644 index 000000000..4de64ffbe --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx @@ -0,0 +1,52 @@ +import React from 'react'; +import Category from 'Components/Header/HealthStatus/ServiceCategory'; +import cn from 'classnames'; + +function SubserviceHealth({ + subservice, + name, +}: { + name: string; + subservice: { health: boolean; details: { errors?: string[]; version?: string } }; +}) { + const [isExpanded, setIsExpanded] = React.useState(!subservice?.health); + + const isExpandable = subservice?.details && Object.keys(subservice?.details).length > 0; + return ( +
+ (isExpandable ? setIsExpanded(!isExpanded) : null)} + name={name} + healthOk={subservice?.health} + isExpandable={isExpandable} + isExpanded={isExpanded} + /> + {isExpanded ? ( +
+ {subservice?.details?.version ? ( +
+
Version
+
+ {subservice?.details?.version} +
+
+ ) : null} + {subservice?.details?.errors?.length ? ( +
+
Error log:
+ {subservice.details.errors.map((err: string, i) => ( +
+ {i + 1}. {err} +
+ ))} +
+ ) : subservice?.health ? null : ( + 'Service not responding' + )} +
+ ) : null} +
+ ); +} + +export default SubserviceHealth; diff --git a/frontend/app/components/Header/HealthStatus/const.ts b/frontend/app/components/Header/HealthStatus/const.ts new file mode 100644 index 000000000..3c13c52dd --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/const.ts @@ -0,0 +1,6 @@ +export const categoryKeyNames = { + backendServices: 'Backend Services', + databases: 'Databases', + ingestionPipeline: 'Ingestion Pipeline', + ssl: 'SSL', +} as const \ No newline at end of file From cbd4e4f6693b90e36dee1daabdc338bb712b05e7 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 15 Mar 2023 16:01:48 +0100 Subject: [PATCH 47/60] change(ui): small ui fixes --- .../components/Header/HealthStatus/HealthModal/HealthModal.tsx | 3 ++- frontend/app/components/Header/HealthStatus/HealthWidget.tsx | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index b0bbb8969..a78ba4ad1 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -34,6 +34,7 @@ function HealthModal({ const handleClose = () => { setShowModal(false); }; + return (
-
diff --git a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx index c6372540b..50b4de76d 100644 --- a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx @@ -79,6 +79,7 @@ function HealthWidget({ onClick={() => setShowModal(true)} healthOk={false} name={service.name} + isSelectable /> ))} From b02bf8c23d794ca6b8d845a131d9f295d002483c Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Mar 2023 13:52:08 +0100 Subject: [PATCH 48/60] change(ui): fix loading animation, add health modal to signup page (/cached in localstorage) --- .../HealthStatus/HealthModal/Footer.tsx | 9 +- .../HealthStatus/HealthModal/HealthModal.tsx | 85 ++++++++++++------- .../Header/HealthStatus/HealthStatus.tsx | 45 +++------- .../Header/HealthStatus/ServiceCategory.tsx | 10 ++- .../components/Header/HealthStatus/const.ts | 5 +- .../Header/HealthStatus/getHealth.ts | 36 ++++++++ frontend/app/components/Signup/Signup.js | 38 +++++++++ frontend/app/components/ui/SVG.tsx | 3 +- frontend/app/svg/icons/book-doc.svg | 10 +++ 9 files changed, 172 insertions(+), 69 deletions(-) create mode 100644 frontend/app/components/Header/HealthStatus/getHealth.ts create mode 100644 frontend/app/svg/icons/book-doc.svg diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx index 43bd434fc..0daf5cf56 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/Footer.tsx @@ -1,9 +1,14 @@ import React from 'react'; import { Icon } from 'UI'; +import cn from 'classnames' -function Footer() { +function Footer({ isSetup }: { isSetup?: boolean }) { return ( -
+
void; isLoading: boolean; healthResponse: { overallHealth: boolean; healthMap: Record }; setShowModal: (isOpen: boolean) => void; + setPassed?: () => void; }) { const [selectedService, setSelectedService] = React.useState(''); React.useEffect(() => { - if (!healthResponse.overallHealth) { - setSelectedService( - Object.keys(healthResponse.healthMap).filter( - (s) => !healthResponse.healthMap[s].healthOk - )[0] - ); + if (!healthResponse?.overallHealth) { + if (healthResponse?.healthMap) { + setSelectedService( + Object.keys(healthResponse.healthMap).filter( + (s) => !healthResponse.healthMap[s].healthOk + )[0] + ); + } } }, [healthResponse]); @@ -35,6 +40,8 @@ function HealthModal({ setShowModal(false); }; + const isSetup = document.location.pathname.includes('/signup') + return (
e.stopPropagation()} @@ -78,37 +85,57 @@ function HealthModal({
- {Object.keys(healthResponse.healthMap).map((service) => ( - - setSelectedService(service)} - healthOk={healthResponse.healthMap[service].healthOk} - name={healthResponse.healthMap[service].name} - isSelectable - isSelected={selectedService === service} - /> - - ))} + {isLoading ? ( + null} name={"Loading health status"} isLoading /> + ) + : Object.keys(healthResponse.healthMap).map((service) => ( + + setSelectedService(service)} + healthOk={healthResponse.healthMap[service].healthOk} + name={healthResponse.healthMap[service].name} + isSelectable + isSelected={selectedService === service} + /> + + ))}
- {selectedService ? ( + {isLoading ? ( +
+ +
+ ) : selectedService ? ( - ) : ( - - )} + ) : + }
-
- -
-
+ {isSetup ? ( +
+ +
+ ) : null} +
); diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx index 7732bca9d..317b36279 100644 --- a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -1,9 +1,9 @@ import React from 'react'; import { Icon } from 'UI'; import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; -import { healthService } from 'App/services'; -import { categoryKeyNames } from './const'; +import { lastAskedKey, healthResponseKey } from './const'; import HealthWidget from "Components/Header/HealthStatus/HealthWidget"; +import { getHealthRequest } from './getHealth' export interface IServiceStats { name: 'backendServices' | 'databases' | 'ingestionPipeline' | 'ssl'; @@ -18,32 +18,8 @@ export interface IServiceStats { }[] } -function mapResponse(resp: Record) { - const services = Object.keys(resp); - const healthMap: Record = {}; - services.forEach((service) => { - healthMap[service] = { - // @ts-ignore - name: categoryKeyNames[service], - healthOk: true, - subservices: resp[service], - serviceName: service, - }; - Object.values(healthMap[service].subservices).forEach((subservice: Record) => { - if (!subservice?.health) healthMap[service].healthOk = false; - }); - }); - - const overallHealth = Object.values(healthMap).every( - (service: Record) => service.healthOk - ); - - return { overallHealth, healthMap }; -} function HealthStatus() { - const lastAskedKey = '__openreplay_health_status'; - const healthResponseKey = '__openreplay_health_response'; const healthResponseSaved = localStorage.getItem(healthResponseKey) || '{}'; const [healthResponse, setHealthResponse] = React.useState(JSON.parse(healthResponseSaved)); const [isLoading, setIsLoading] = React.useState(false); @@ -55,12 +31,8 @@ function HealthStatus() { if (isLoading) return; try { setIsLoading(true); - const r = await healthService.fetchStatus(); 
- const healthMap = mapResponse(r); + const { healthMap, asked } = await getHealthRequest(); setHealthResponse(healthMap); - const asked = new Date().getTime(); - localStorage.setItem(healthResponseKey, JSON.stringify(healthMap)); - localStorage.setItem(lastAskedKey, asked.toString()); setLastAsked(asked.toString()); } catch (e) { console.error(e); @@ -82,10 +54,10 @@ function HealthStatus() { const icon = healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); return ( <> -
+
@@ -102,7 +74,12 @@ function HealthStatus() { />
{showModal ? ( - + ) : null} ); diff --git a/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx index 3c9259c39..be5edec1f 100644 --- a/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx +++ b/frontend/app/components/Header/HealthStatus/ServiceCategory.tsx @@ -1,6 +1,7 @@ import { Icon } from 'UI'; import React from 'react'; import cn from 'classnames'; +import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; function Category({ name, @@ -10,15 +11,18 @@ function Category({ isExpandable, isExpanded, isSelected, + isLoading, }: { name: string; - healthOk: boolean; + healthOk?: boolean; + isLoading?: boolean; onClick: (args: any) => void; isSelectable?: boolean; isExpandable?: boolean; isExpanded?: boolean; isSelected?: boolean; }) { + const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); return (
- + {isLoading ? ( + + ) : } {name} {isSelectable ? : null} diff --git a/frontend/app/components/Header/HealthStatus/const.ts b/frontend/app/components/Header/HealthStatus/const.ts index 3c13c52dd..69b5b1c5e 100644 --- a/frontend/app/components/Header/HealthStatus/const.ts +++ b/frontend/app/components/Header/HealthStatus/const.ts @@ -3,4 +3,7 @@ export const categoryKeyNames = { databases: 'Databases', ingestionPipeline: 'Ingestion Pipeline', ssl: 'SSL', -} as const \ No newline at end of file +} as const + +export const lastAskedKey = '__openreplay_health_status'; +export const healthResponseKey = '__openreplay_health_response'; \ No newline at end of file diff --git a/frontend/app/components/Header/HealthStatus/getHealth.ts b/frontend/app/components/Header/HealthStatus/getHealth.ts new file mode 100644 index 000000000..70bd8914c --- /dev/null +++ b/frontend/app/components/Header/HealthStatus/getHealth.ts @@ -0,0 +1,36 @@ +import { healthService } from 'App/services'; +import { categoryKeyNames, lastAskedKey, healthResponseKey } from "Components/Header/HealthStatus/const"; +import { IServiceStats } from "Components/Header/HealthStatus/HealthStatus"; + + +function mapResponse(resp: Record) { + const services = Object.keys(resp); + const healthMap: Record = {}; + services.forEach((service) => { + healthMap[service] = { + // @ts-ignore + name: categoryKeyNames[service], + healthOk: true, + subservices: resp[service], + serviceName: service, + }; + Object.values(healthMap[service].subservices).forEach((subservice: Record) => { + if (!subservice?.health) healthMap[service].healthOk = false; + }); + }); + + const overallHealth = Object.values(healthMap).every( + (service: Record) => service.healthOk + ); + + return { overallHealth, healthMap }; +} + +export async function getHealthRequest() { + const r = await healthService.fetchStatus(); + const healthMap = mapResponse(r); + const asked = new Date().getTime(); + localStorage.setItem(healthResponseKey, 
JSON.stringify(healthMap)); + localStorage.setItem(lastAskedKey, asked.toString()); + return { healthMap, asked } +} \ No newline at end of file diff --git a/frontend/app/components/Signup/Signup.js b/frontend/app/components/Signup/Signup.js index 83a658ec1..f5d61564f 100644 --- a/frontend/app/components/Signup/Signup.js +++ b/frontend/app/components/Signup/Signup.js @@ -6,6 +6,8 @@ import stl from './signup.module.css'; import cn from 'classnames'; import SignupForm from './SignupForm'; import RegisterBg from '../../svg/register.svg'; +import HealthModal from 'Components/Header/HealthStatus/HealthModal/HealthModal'; +import { getHealthRequest } from 'Components/Header/HealthStatus/getHealth'; const BulletItem = ({ text }) => (
@@ -15,9 +17,45 @@ const BulletItem = ({ text }) => (
{text}
); + +const healthStatusCheck_key = '__or__healthStatusCheck_key' + @withPageTitle('Signup - OpenReplay') export default class Signup extends React.Component { + state = { + healthModalPassed: localStorage.getItem(healthStatusCheck_key === 'true'), + healthStatusLoading: true, + healthStatus: null, + } + + getHealth = async () => { + this.setState({ healthStatusLoading: true }); + const { healthMap } = await getHealthRequest(); + this.setState({ healthStatus: healthMap, healthStatusLoading: false }); + } + + componentDidMount() { + if (!this.state.healthModalPassed) void this.getHealth(); + } + + setHealthModalPassed = () => { + localStorage.setItem(healthStatusCheck_key, 'true'); + this.setState({ healthModalPassed: true }); + } + render() { + if (!this.state.healthModalPassed) { + return ( + null} + healthResponse={this.state.healthStatus} + getHealth={this.getHealth} + isLoading={this.state.healthStatusLoading} + setPassed={this.setHealthModalPassed} + /> + ) + } + return (
diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index 95254b16c..a2322c730 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-counterclockwise' | 'arrow-down-short' | 'arrow-down' | 'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 
'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle-fill' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 
'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 
'lightbulb' | 'link-45deg' | 'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'pulse' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-counterclockwise' | 
'arrow-down-short' | 'arrow-down' | 'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book-doc' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 
'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle-fill' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 
'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 
'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'pulse' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -78,6 +78,7 @@ const SVG = (props: Props) => { case 'bell-slash': return ; case 'bell': return ; case 'binoculars': return ; + case 'book-doc': return ; case 'book': return ; case 'browser/browser': return ; case 'browser/chrome': return ; diff --git a/frontend/app/svg/icons/book-doc.svg b/frontend/app/svg/icons/book-doc.svg new file mode 100644 index 000000000..7e6f2a680 --- /dev/null +++ b/frontend/app/svg/icons/book-doc.svg @@ -0,0 +1,10 @@ + + + + + + + + + + From 4bdb30daa3d11679e7f341244a26eb257e020078 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 23 Mar 2023 13:54:14 +0100 
Subject: [PATCH 49/60] change(ui): fix error printing --- .../Header/HealthStatus/HealthModal/HealthModal.tsx | 2 +- .../HealthStatus/SubserviceHealth/SubserviceHealth.tsx | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx index 0587e5423..b0a6fb9fa 100644 --- a/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthModal/HealthModal.tsx @@ -135,7 +135,7 @@ function HealthModal({
) : null} -
+
); diff --git a/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx index 4de64ffbe..6fd91031b 100644 --- a/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx +++ b/frontend/app/components/Header/HealthStatus/SubserviceHealth/SubserviceHealth.tsx @@ -34,11 +34,7 @@ function SubserviceHealth({ {subservice?.details?.errors?.length ? (
Error log:
- {subservice.details.errors.map((err: string, i) => ( -
- {i + 1}. {err} -
- ))} + {subservice.details.errors.toString()}
) : subservice?.health ? null : ( 'Service not responding' From c1568b0929df6e05aa5381171e3ef6879b1c024a Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 24 Mar 2023 11:13:27 +0100 Subject: [PATCH 50/60] change(ui): fix logs --- .../app/player/web/managers/DOM/DOMManager.ts | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index ab839118b..a5c2f2c2f 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -257,13 +257,9 @@ export default class DOMManager extends ListWalker { } return case MType.RemoveNodeAttribute: - if (isJump) { - this.attrsBacktrack = this.attrsBacktrack.filter(m => m.id !== msg.id && m.name !== msg.name) - } else { vn = this.vElements.get(msg.id) if (!vn) { logger.error("Node not found", msg); return } vn.removeAttribute(msg.name) - } return case MType.SetInputValue: vn = this.vElements.get(msg.id) @@ -478,23 +474,10 @@ export default class DOMManager extends ListWalker { * */ // http://0.0.0.0:3333/5/session/8452905874437457 // 70 iframe, 8 create element - STYLE tag - console.time('moveWait') - let t0 = performance.now() - let t1 = t0 - const timings = [] await this.moveWait(t, (msg) => { - t0 = performance.now() this.applyMessage(msg, isJump) - t1 = performance.now() - timings.push({ t: t1 - t0, m: msg.tp, msg }) }) - console.timeEnd('moveWait') - console.log( - timings.sort((a, b) => b.t - a.t), - timings.filter(t => t.msg.tag === 'STYLE').length, - ) - if (isJump) { this.attrsBacktrack.forEach(msg => { this.applyBacktrack(msg) From 5ddb0bbad4cb9bb33f73622dcd698d6a30a9cbd8 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Wed, 8 Mar 2023 09:37:48 +0100 Subject: [PATCH 51/60] feat(tracker): track network requests in iframes --- tracker/tracker/src/main/app/index.ts | 5 ++ .../src/main/app/observer/iframe_observer.ts | 4 ++ 
tracker/tracker/src/main/modules/network.ts | 64 +++++++++++++++---- 3 files changed, 60 insertions(+), 13 deletions(-) diff --git a/tracker/tracker/src/main/app/index.ts b/tracker/tracker/src/main/app/index.ts index faca22b9d..4c770f664 100644 --- a/tracker/tracker/src/main/app/index.ts +++ b/tracker/tracker/src/main/app/index.ts @@ -14,6 +14,8 @@ import type { Options as ObserverOptions } from './observer/top_observer.js' import type { Options as SanitizerOptions } from './sanitizer.js' import type { Options as LoggerOptions } from './logger.js' import type { Options as SessOptions } from './session.js' +import type { Options as NetworkOptions } from '../modules/network.js' + import type { Options as WebworkerOptions, ToWorkerData, @@ -75,6 +77,7 @@ type AppOptions = { // @deprecated onStart?: StartCallback + network?: NetworkOptions } & WebworkerOptions & SessOptions @@ -99,6 +102,7 @@ export default class App { private readonly stopCallbacks: Array<() => any> = [] private readonly commitCallbacks: Array = [] private readonly options: AppOptions + public readonly networkOptions?: NetworkOptions private readonly revID: string private activityState: ActivityState = ActivityState.NotActive private readonly version = 'TRACKER_VERSION' // TODO: version compatability check inside each plugin. @@ -109,6 +113,7 @@ export default class App { // } ?? 
maybe onStart is good this.projectKey = projectKey + this.networkOptions = options.network this.options = Object.assign( { revID: '', diff --git a/tracker/tracker/src/main/app/observer/iframe_observer.ts b/tracker/tracker/src/main/app/observer/iframe_observer.ts index 05df1fe54..e3b82b1d9 100644 --- a/tracker/tracker/src/main/app/observer/iframe_observer.ts +++ b/tracker/tracker/src/main/app/observer/iframe_observer.ts @@ -1,10 +1,13 @@ import Observer from './observer.js' import { CreateIFrameDocument } from '../messages.gen.js' +import Network from '../../modules/network.js' export default class IFrameObserver extends Observer { observe(iframe: HTMLIFrameElement) { const doc = iframe.contentDocument + const iWindow = iframe.contentWindow const hostID = this.app.nodes.getID(iframe) + console.log(iframe) if (!doc || hostID === undefined) { return } //log TODO common app.logger @@ -16,6 +19,7 @@ export default class IFrameObserver extends Observer { return } this.app.send(CreateIFrameDocument(hostID, docID)) + Network(this.app, this.app.networkOptions, iWindow!) }) } } diff --git a/tracker/tracker/src/main/modules/network.ts b/tracker/tracker/src/main/modules/network.ts index a1a3e2c9c..097648d54 100644 --- a/tracker/tracker/src/main/modules/network.ts +++ b/tracker/tracker/src/main/modules/network.ts @@ -92,7 +92,7 @@ export interface Options { sanitizer?: Sanitizer } -export default function (app: App, opts: Partial = {}) { +export default function (app: App, opts: Partial = {}, customEnv?: Record) { const options: Options = Object.assign( { failuresOnly: false, @@ -150,8 +150,11 @@ export default function (app: App, opts: Partial = {}) { } /* ====== Fetch ====== */ - const origFetch = window.fetch.bind(window) as WindowFetch - window.fetch = (input, init = {}) => { + const origFetch = customEnv + ? 
(customEnv.fetch.bind(customEnv) as WindowFetch) + : (window.fetch.bind(window) as WindowFetch) + + const trackFetch = (input: RequestInfo | URL, init: RequestInit = {}) => { if (!(typeof input === 'string' || input instanceof URL) || app.isServiceURL(String(input))) { return origFetch(input, init) } @@ -237,12 +240,23 @@ export default function (app: App, opts: Partial = {}) { return response }) } + + if (customEnv) { + customEnv.fetch = trackFetch + } else { + window.fetch = trackFetch + } /* ====== <> ====== */ /* ====== XHR ====== */ - const nativeOpen = XMLHttpRequest.prototype.open - XMLHttpRequest.prototype.open = function (initMethod, url) { - const xhr = this + + const nativeOpen = customEnv + ? customEnv.XMLHttpRequest.prototype.open + : XMLHttpRequest.prototype.open + + function trackXMLHttpReqOpen(initMethod: string, url: string | URL) { + // @ts-ignore ??? this -> XMLHttpRequest + const xhr = this as XMLHttpRequest setSessionTokenHeader((name, value) => xhr.setRequestHeader(name, value)) let startTime = 0 @@ -302,23 +316,47 @@ export default function (app: App, opts: Partial = {}) { //TODO: handle error (though it has no Error API nor any useful information) //xhr.addEventListener('error', (e) => {}) - return nativeOpen.apply(this, arguments) + // @ts-ignore ??? this -> XMLHttpRequest + return nativeOpen.apply(this as XMLHttpRequest, arguments) } + if (customEnv) { + customEnv.XMLHttpRequest.prototype.open = trackXMLHttpReqOpen.bind(customEnv) + } else { + XMLHttpRequest.prototype.open = trackXMLHttpReqOpen + } + const nativeSend = XMLHttpRequest.prototype.send - XMLHttpRequest.prototype.send = function (body) { - const rdo = getXHRRequestDataObject(this) + function trackXHRSend(body: Document | XMLHttpRequestBodyInit | null | undefined) { + // @ts-ignore ??? this -> XMLHttpRequest + const rdo = getXHRRequestDataObject(this as XMLHttpRequest) rdo.body = body - return nativeSend.apply(this, arguments) + // @ts-ignore ??? 
this -> XMLHttpRequest + return nativeSend.apply(this as XMLHttpRequest, arguments) } + + if (customEnv) { + customEnv.XMLHttpRequest.prototype.send = trackXHRSend.bind(customEnv) + } else { + XMLHttpRequest.prototype.send = trackXHRSend + } + const nativeSetRequestHeader = XMLHttpRequest.prototype.setRequestHeader - XMLHttpRequest.prototype.setRequestHeader = function (name, value) { + + function trackSetReqHeader(name: string, value: string) { if (!isHIgnored(name)) { - const rdo = getXHRRequestDataObject(this) + // @ts-ignore ??? this -> XMLHttpRequest + const rdo = getXHRRequestDataObject(this as XMLHttpRequest) rdo.headers[name] = value } + // @ts-ignore ??? this -> XMLHttpRequest + return nativeSetRequestHeader.apply(this as XMLHttpRequest, arguments) + } - return nativeSetRequestHeader.apply(this, arguments) + if (customEnv) { + customEnv.XMLHttpRequest.prototype.setRequestHeader = trackSetReqHeader.bind(customEnv) + } else { + XMLHttpRequest.prototype.setRequestHeader = trackSetReqHeader } /* ====== <> ====== */ } From bd935b2f979b38d1309f93f464f6a7031960629d Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Thu, 9 Mar 2023 16:45:33 +0100 Subject: [PATCH 52/60] fix(tracker): iframe network tracking --- frontend/app/logger/index.js | 23 +++++++++++++------ .../app/player/web/messages/MFileReader.ts | 6 ++--- .../src/main/app/observer/iframe_observer.ts | 4 ---- .../src/main/app/observer/top_observer.ts | 2 ++ 4 files changed, 20 insertions(+), 15 deletions(-) diff --git a/frontend/app/logger/index.js b/frontend/app/logger/index.js index 353f186e9..caf6d7bee 100644 --- a/frontend/app/logger/index.js +++ b/frontend/app/logger/index.js @@ -24,18 +24,27 @@ function error(...args) { } let groupTm = {}; +let groupedLogs = {}; function group(groupName, ...args) { if (!window.env.PRODUCTION || options.verbose) { - if (!groupTm[groupName]) { - groupTm[groupName] = setTimeout(() => { - console.groupEnd() - delete groupTm[groupName] - }, 500); - 
console.groupCollapsed(groupName); + if (groupTm[groupName]) { + clearTimeout(groupTm[groupName]) + groupTm[groupName] = null + } else { + groupedLogs[groupName] = [] } - console.log(...args); + groupedLogs[groupName].push(args); + groupTm[groupName] = setTimeout(() => { + console.groupCollapsed(groupName) + groupedLogs[groupName].forEach((log) => { + console.log(...log) + }) + console.groupEnd() + delete groupTm[groupName] + delete groupedLogs[groupName] + }, 500) options.exceptionsLogs.push(args) } } diff --git a/frontend/app/player/web/messages/MFileReader.ts b/frontend/app/player/web/messages/MFileReader.ts index d1b131595..b5fdde85c 100644 --- a/frontend/app/player/web/messages/MFileReader.ts +++ b/frontend/app/player/web/messages/MFileReader.ts @@ -3,7 +3,7 @@ import type { RawMessage } from './raw.gen'; import { MType } from './raw.gen'; import RawMessageReader from './RawMessageReader.gen'; import resolveURL from './urlBasedResolver' - +import Logger from 'App/logger' // TODO: composition instead of inheritance // needSkipMessage() and next() methods here use buf and p protected properties, @@ -59,10 +59,8 @@ export default class MFileReader extends RawMessageReader { if (!skippedMessage) { return null } - this.logger.group("Openreplay: Skipping messages ", skippedMessage) - + Logger.group("Openreplay: Skipping messages ", skippedMessage) } - this.pLastMessageID = this.p const rMsg = this.readRawMessage() diff --git a/tracker/tracker/src/main/app/observer/iframe_observer.ts b/tracker/tracker/src/main/app/observer/iframe_observer.ts index e3b82b1d9..05df1fe54 100644 --- a/tracker/tracker/src/main/app/observer/iframe_observer.ts +++ b/tracker/tracker/src/main/app/observer/iframe_observer.ts @@ -1,13 +1,10 @@ import Observer from './observer.js' import { CreateIFrameDocument } from '../messages.gen.js' -import Network from '../../modules/network.js' export default class IFrameObserver extends Observer { observe(iframe: HTMLIFrameElement) { const doc = 
iframe.contentDocument - const iWindow = iframe.contentWindow const hostID = this.app.nodes.getID(iframe) - console.log(iframe) if (!doc || hostID === undefined) { return } //log TODO common app.logger @@ -19,7 +16,6 @@ export default class IFrameObserver extends Observer { return } this.app.send(CreateIFrameDocument(hostID, docID)) - Network(this.app, this.app.networkOptions, iWindow!) }) } } diff --git a/tracker/tracker/src/main/app/observer/top_observer.ts b/tracker/tracker/src/main/app/observer/top_observer.ts index 38944c5c9..7eb15c15b 100644 --- a/tracker/tracker/src/main/app/observer/top_observer.ts +++ b/tracker/tracker/src/main/app/observer/top_observer.ts @@ -1,5 +1,6 @@ import Observer from './observer.js' import { isElementNode, hasTag } from '../guards.js' +import Network from '../../modules/network.js' import IFrameObserver from './iframe_observer.js' import ShadowRootObserver from './shadow_root_observer.js' @@ -92,6 +93,7 @@ export default class TopObserver extends Observer { //TODO: more explicit logic ) { this.contextsSet.add(currentWin) + Network(this.app, this.app.networkOptions, currentWin) //@ts-ignore https://github.com/microsoft/TypeScript/issues/41684 this.contextCallbacks.forEach((cb) => cb(currentWin)) } From 1cf9e54f5a9383053da534650a009f6ccecb5772 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Fri, 24 Mar 2023 12:07:20 +0100 Subject: [PATCH 53/60] change(tracker): more configs for mouse module --- tracker/tracker/CHANGELOG.md | 3 ++- tracker/tracker/src/main/modules/mouse.ts | 33 ++++++++++++++++++----- 2 files changed, 28 insertions(+), 8 deletions(-) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index 90c68a91f..fa8ab398a 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -4,7 +4,8 @@ - Capture DOM node drop event (>30% nodes removed) - Capture iframe network requests - Detect cached requests to img, css and js resources; send transferred size -- added `{ network: { 
disableClickmaps: boolean } }` to disable calculating el. selectors +- added `{ mouse: { disableClickmaps: boolean } }` to disable calculating el. selectors +- added `{ mouse: { minSelectorDepth?: number; nthThreshold?: number; maxOptimiseTries?: number }` for selector finding optimisations ## 5.0.1 diff --git a/tracker/tracker/src/main/modules/mouse.ts b/tracker/tracker/src/main/modules/mouse.ts index fb69bef08..5bba2ddda 100644 --- a/tracker/tracker/src/main/modules/mouse.ts +++ b/tracker/tracker/src/main/modules/mouse.ts @@ -5,13 +5,13 @@ import { MouseMove, MouseClick, MouseThrashing } from '../app/messages.gen.js' import { getInputLabel } from './input.js' import { finder } from '@medv/finder' -function _getSelector(target: Element, document: Document) { +function _getSelector(target: Element, document: Document, options?: MouseHandlerOptions): string { const selector = finder(target, { root: document.body, seedMinLength: 3, - optimizedMinLength: 2, - threshold: 1000, - maxNumberOfTries: 10_000, + optimizedMinLength: options?.minSelectorDepth || 2, + threshold: options?.nthThreshold || 1000, + maxNumberOfTries: options?.maxOptimiseTries || 10_000, }) return selector @@ -75,6 +75,25 @@ function _getTarget(target: Element, document: Document): Element | null { export interface MouseHandlerOptions { disableClickmaps?: boolean + /** minimum length of an optimised selector. 
+ * + * body > div > div > p => body > p for example + * + * default 2 + * */ + minSelectorDepth?: number + /** how many selectors to try before falling back to nth-child selectors + * performance expensive operation + * + * default 1000 + * */ + nthThreshold?: number + /** + * how many tries to optimise and shorten the selector + * + * default 10_000 + * */ + maxOptimiseTries?: number } export default function (app: App, options?: MouseHandlerOptions): void { @@ -155,8 +174,8 @@ export default function (app: App, options?: MouseHandlerOptions): void { } const patchDocument = (document: Document, topframe = false) => { - function getSelector(id: number, target: Element): string { - return (selectorMap[id] = selectorMap[id] || _getSelector(target, document)) + function getSelector(id: number, target: Element, options?: MouseHandlerOptions): string { + return (selectorMap[id] = selectorMap[id] || _getSelector(target, document, options)) } const attachListener = topframe @@ -202,7 +221,7 @@ export default function (app: App, options?: MouseHandlerOptions): void { id, mouseTarget === target ? Math.round(performance.now() - mouseTargetTime) : 0, getTargetLabel(target), - isClickable(target) && !disableClickmaps ? getSelector(id, target) : '', + isClickable(target) && !disableClickmaps ? 
getSelector(id, target, options) : '', ), true, ) From ff762b9853bcc939cdd945f8c51cbf1a21257bae Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Mar 2023 13:30:00 +0100 Subject: [PATCH 54/60] feat(chalice): changed health check endpoint --- api/chalicelib/core/health.py | 17 +------- ee/api/chalicelib/core/health.py | 68 +++++++++++++------------------- 2 files changed, 30 insertions(+), 55 deletions(-) diff --git a/api/chalicelib/core/health.py b/api/chalicelib/core/health.py index f4e2abc6c..a9a54977c 100644 --- a/api/chalicelib/core/health.py +++ b/api/chalicelib/core/health.py @@ -14,7 +14,6 @@ if config("LOCAL_DEV", cast=bool, default=False): "chalice": "http://127.0.0.1:8888/metrics", "db": "http://127.0.0.1:8888/metrics", "ender": "http://127.0.0.1:8888/metrics", - "frontend": "http://127.0.0.1:8888/metrics", "heuristics": "http://127.0.0.1:8888/metrics", "http": "http://127.0.0.1:8888/metrics", "ingress-nginx": "http://127.0.0.1:8888/metrics", @@ -35,7 +34,6 @@ else: "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", - "frontend": "http://frontend-openreplay.app.svc.cluster.local:8888/metrics", "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", @@ -157,7 +155,7 @@ def get_health(): "chalice": __always_healthy_with_version, "db": __check_be_service("db"), "ender": __check_be_service("ender"), - "frontend": __check_be_service("frontend"), + "frontend": __always_healthy, "heuristics": __check_be_service("heuristics"), "http": __check_be_service("http"), "ingress-nginx": __always_healthy, @@ -166,18 +164,7 @@ def get_health(): "sink": __check_be_service("sink"), "sourcemapreader": 
__check_be_service("sourcemapreader"), "storage": __check_be_service("storage") - }, - # "overall": { - # "health": "na", - # "details": { - # "numberOfEventCaptured": "int", - # "numberOfSessionsCaptured": "int" - # }, - # "labels": { - # "parent": "information" - # } - # }, - # "ssl": True + } } for parent_key in health_map.keys(): for element_key in health_map[parent_key]: diff --git a/ee/api/chalicelib/core/health.py b/ee/api/chalicelib/core/health.py index e00747288..4b95888d1 100644 --- a/ee/api/chalicelib/core/health.py +++ b/ee/api/chalicelib/core/health.py @@ -15,7 +15,6 @@ if config("LOCAL_DEV", cast=bool, default=False): "chalice": "http://127.0.0.1:8888/metrics", "db": "http://127.0.0.1:8888/metrics", "ender": "http://127.0.0.1:8888/metrics", - "frontend": "http://127.0.0.1:8888/metrics", "heuristics": "http://127.0.0.1:8888/metrics", "http": "http://127.0.0.1:8888/metrics", "ingress-nginx": "http://127.0.0.1:8888/metrics", @@ -36,7 +35,6 @@ else: "chalice": "http://chalice-openreplay.app.svc.cluster.local:8888/metrics", "db": "http://db-openreplay.app.svc.cluster.local:8888/metrics", "ender": "http://ender-openreplay.app.svc.cluster.local:8888/metrics", - "frontend": "http://frontend-openreplay.app.svc.cluster.local:8888/metrics", "heuristics": "http://heuristics-openreplay.app.svc.cluster.local:8888/metrics", "http": "http://http-openreplay.app.svc.cluster.local:8888/metrics", "ingress-nginx": "http://ingress-nginx-openreplay.app.svc.cluster.local:8888/metrics", @@ -153,6 +151,7 @@ def get_health(): "ingestionPipeline": { "redis": __check_redis, # "kafka": __check_kafka + "kafka": __always_healthy }, "backendServices": { "alerts": __check_be_service("alerts"), @@ -161,7 +160,7 @@ def get_health(): "chalice": __always_healthy_with_version, "db": __check_be_service("db"), "ender": __check_be_service("ender"), - "frontend": __check_be_service("frontend"), + "frontend": __always_healthy, "heuristics": __check_be_service("heuristics"), "http": 
__check_be_service("http"), "ingress-nginx": __always_healthy, @@ -171,18 +170,7 @@ def get_health(): "sink": __check_be_service("sink"), "sourcemapreader": __check_be_service("sourcemapreader"), "storage": __check_be_service("storage") - }, - # "overall": { - # "health": "na", - # "details": { - # "numberOfEventCaptured": "int", - # "numberOfSessionsCaptured": "int" - # }, - # "labels": { - # "parent": "information" - # } - # }, - # "ssl": True + } } for parent_key in health_map.keys(): for element_key in health_map[parent_key]: @@ -213,28 +201,28 @@ def __check_database_ch(): } -def __check_kafka(): - fail_response = { - "health": False, - "details": {"errors": ["server health-check failed"]} - } - if config("KAFKA_SERVERS", default=None) is None: - fail_response["details"]["errors"].append("KAFKA_SERVERS not defined in env-vars") - return fail_response - - try: - a = AdminClient({'bootstrap.servers': config("KAFKA_SERVERS"), "socket.connection.setup.timeout.ms": 3000}) - topics = a.list_topics().topics - if not topics: - raise Exception('topics not found') - - except Exception as e: - print("!! Issue getting kafka-health response") - print(str(e)) - fail_response["details"]["errors"].append(str(e)) - return fail_response - - return { - "health": True, - "details": {} - } +# def __check_kafka(): +# fail_response = { +# "health": False, +# "details": {"errors": ["server health-check failed"]} +# } +# if config("KAFKA_SERVERS", default=None) is None: +# fail_response["details"]["errors"].append("KAFKA_SERVERS not defined in env-vars") +# return fail_response +# +# try: +# a = AdminClient({'bootstrap.servers': config("KAFKA_SERVERS"), "socket.connection.setup.timeout.ms": 3000}) +# topics = a.list_topics().topics +# if not topics: +# raise Exception('topics not found') +# +# except Exception as e: +# print("!! 
Issue getting kafka-health response") +# print(str(e)) +# fail_response["details"]["errors"].append(str(e)) +# return fail_response +# +# return { +# "health": True, +# "details": {} +# } From 681241cad8ed6d62073bbe1acc7433f4ad3af015 Mon Sep 17 00:00:00 2001 From: Mehdi Osman Date: Fri, 24 Mar 2023 11:56:30 -0400 Subject: [PATCH 55/60] Added redis-py --- third-party.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/third-party.md b/third-party.md index 0cfe2cac2..ac17bb869 100644 --- a/third-party.md +++ b/third-party.md @@ -1,4 +1,4 @@ -## Licenses (as of January 23, 2023) +## Licenses (as of March 24, 2023) Below is the list of dependencies used in OpenReplay software. Licenses may change between versions, so please keep this up to date with every new library you use. @@ -40,6 +40,7 @@ Below is the list of dependencies used in OpenReplay software. Licenses may chan | python-multipart | Apache | Python | | elasticsearch-py | Apache2 | Python | | jira | BSD2 | Python | +| redis-py | MIT | Python | | clickhouse-driver | MIT | Python | | python3-saml | MIT | Python | | kubernetes | Apache2 | Python | From 41127b9fd0dccb711b1f91500b886a4319e9faa3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 24 Mar 2023 17:28:43 +0100 Subject: [PATCH 56/60] feat(chalice): fixed batch notifications feat(DB): changed structure --- api/chalicelib/core/alerts.py | 2 +- .../db/init_dbs/postgresql/init_schema.sql | 19 +++++++++++-------- .../db/init_dbs/postgresql/init_schema.sql | 19 +++++++++++-------- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index 3c8b00c54..dfa86ed75 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -116,7 +116,7 @@ def process_notifications(data): BATCH_SIZE = 200 for t in full.keys(): for i in range(0, len(full[t]), BATCH_SIZE): - notifications_list = full[t][i:i + BATCH_SIZE] + notifications_list = full[t][i:min(i + 
BATCH_SIZE, len(full[t]))] if notifications_list is None or len(notifications_list) == 0: break diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 308acbda1..0418c5d7d 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -947,13 +947,14 @@ $$ CREATE TABLE IF NOT EXISTS events.clicks ( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, path text, - selector text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS clicks_session_id_idx ON events.clicks (session_id); @@ -974,8 +975,10 @@ $$ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, message_id bigint NOT NULL, timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + duration integer DEFAULT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX IF NOT EXISTS inputs_session_id_idx ON events.inputs (session_id); diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 14b6550a2..2fe532636 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -628,13 +628,14 @@ $$ CREATE TABLE events.clicks ( - session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, - message_id bigint NOT NULL, - timestamp 
bigint NOT NULL, - label text DEFAULT NULL, - url text DEFAULT '' NOT NULL, + session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, + message_id bigint NOT NULL, + timestamp bigint NOT NULL, + label text DEFAULT NULL, + url text DEFAULT '' NOT NULL, path text, - selector text DEFAULT '' NOT NULL, + selector text DEFAULT '' NOT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX clicks_session_id_idx ON events.clicks (session_id); @@ -654,8 +655,10 @@ $$ session_id bigint NOT NULL REFERENCES sessions (session_id) ON DELETE CASCADE, message_id bigint NOT NULL, timestamp bigint NOT NULL, - label text DEFAULT NULL, - value text DEFAULT NULL, + label text DEFAULT NULL, + value text DEFAULT NULL, + duration integer DEFAULT NULL, + hesitation integer DEFAULT NULL, PRIMARY KEY (session_id, message_id) ); CREATE INDEX inputs_session_id_idx ON events.inputs (session_id); From 9e2839a5e448fbc1cafe6a113538dc72b734ed2b Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Fri, 24 Mar 2023 17:56:00 +0100 Subject: [PATCH 57/60] fix(tracker): finaliseBatch for over-beaconSized messages --- tracker/tracker/src/webworker/BatchWriter.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tracker/tracker/src/webworker/BatchWriter.ts b/tracker/tracker/src/webworker/BatchWriter.ts index d9275ada3..cf7a6e4b4 100644 --- a/tracker/tracker/src/webworker/BatchWriter.ts +++ b/tracker/tracker/src/webworker/BatchWriter.ts @@ -117,11 +117,13 @@ export default class BatchWriter { if (this.writeWithSize(message)) { return } - // buffer is too small. Create one with maximal capacity + // buffer is too small. Creating one with maximal capacity for this message only this.encoder = new MessageEncoder(this.beaconSizeLimit) this.prepare() if (!this.writeWithSize(message)) { console.warn('OpenReplay: beacon size overflow. 
Skipping large message.', message, this) + } else { + this.finaliseBatch() } // reset encoder to normal size this.encoder = new MessageEncoder(this.beaconSize) From d53de951ffa1c0abf51712f1afe6a63f1a14c361 Mon Sep 17 00:00:00 2001 From: nick-delirium Date: Mon, 27 Mar 2023 10:26:13 +0200 Subject: [PATCH 58/60] change(tracker): changelogs update; tracker v5.0.2 --- tracker/tracker/CHANGELOG.md | 1 + tracker/tracker/package.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tracker/tracker/CHANGELOG.md b/tracker/tracker/CHANGELOG.md index fa8ab398a..e0ba20c30 100644 --- a/tracker/tracker/CHANGELOG.md +++ b/tracker/tracker/CHANGELOG.md @@ -6,6 +6,7 @@ - Detect cached requests to img, css and js resources; send transferred size - added `{ mouse: { disableClickmaps: boolean } }` to disable calculating el. selectors - added `{ mouse: { minSelectorDepth?: number; nthThreshold?: number; maxOptimiseTries?: number }` for selector finding optimisations +- fixed inline css loading in specific cases when assets gets around min flush size ## 5.0.1 diff --git a/tracker/tracker/package.json b/tracker/tracker/package.json index 7705d49c9..7ac8ba10f 100644 --- a/tracker/tracker/package.json +++ b/tracker/tracker/package.json @@ -1,7 +1,7 @@ { "name": "@openreplay/tracker", "description": "The OpenReplay tracker main package", - "version": "5.0.2-beta.2", + "version": "5.0.2", "keywords": [ "logging", "replay" From b8a59c045a7a07dadff00a9ca2380c4525c0f099 Mon Sep 17 00:00:00 2001 From: Shekar Siri Date: Mon, 27 Mar 2023 12:48:27 +0200 Subject: [PATCH 59/60] change(ui) - sessions listing improvements --- frontend/app/components/Overview/Overview.tsx | 33 +++++++----- .../shared/OverviewMenu/OverviewMenu.tsx | 52 +++++++++++++++++++ .../components/shared/OverviewMenu/index.ts | 1 + .../SessionHeader/SessionHeader.tsx | 40 +++++--------- frontend/app/components/ui/SVG.tsx | 3 +- frontend/app/svg/icons/stickies.svg | 4 ++ 6 files changed, 90 insertions(+), 43 
deletions(-) create mode 100644 frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx create mode 100644 frontend/app/components/shared/OverviewMenu/index.ts create mode 100644 frontend/app/svg/icons/stickies.svg diff --git a/frontend/app/components/Overview/Overview.tsx b/frontend/app/components/Overview/Overview.tsx index 78b4bfe2b..9d71b5702 100644 --- a/frontend/app/components/Overview/Overview.tsx +++ b/frontend/app/components/Overview/Overview.tsx @@ -4,25 +4,30 @@ import NoSessionsMessage from 'Shared/NoSessionsMessage'; import MainSearchBar from 'Shared/MainSearchBar'; import SessionSearch from 'Shared/SessionSearch'; import SessionListContainer from 'Shared/SessionListContainer/SessionListContainer'; +import cn from 'classnames'; +import OverviewMenu from 'Shared/OverviewMenu'; function Overview() { - return ( -
-
-
- + return ( +
+
+ +
+
+ -
- - +
+ + -
- -
-
-
+
+
- ); +
+
+ ); } export default withPageTitle('Sessions - OpenReplay')(Overview); diff --git a/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx new file mode 100644 index 000000000..9736c353f --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/OverviewMenu.tsx @@ -0,0 +1,52 @@ +import React from 'react'; +import { SideMenuitem } from 'UI'; +import { connect } from 'react-redux'; +import { setActiveTab } from 'Duck/search'; + +interface Props { + setActiveTab: (tab: any) => void; + activeTab: string; + isEnterprise: boolean; +} +function OverviewMenu(props: Props) { + const { activeTab, isEnterprise } = props; + + return ( +
+
+ props.setActiveTab({ type: 'all' })} + /> +
+
+
+ props.setActiveTab({ type: 'bookmark' })} + /> +
+
+
+ props.setActiveTab({ type: 'notes' })} + /> +
+
+ ); +} + +export default connect((state: any) => ({ + activeTab: state.getIn(['search', 'activeTab', 'type']), + isEnterprise: state.getIn(['user', 'account', 'edition']) === 'ee', +}), { setActiveTab })(OverviewMenu); diff --git a/frontend/app/components/shared/OverviewMenu/index.ts b/frontend/app/components/shared/OverviewMenu/index.ts new file mode 100644 index 000000000..91599b4c8 --- /dev/null +++ b/frontend/app/components/shared/OverviewMenu/index.ts @@ -0,0 +1 @@ +export { default } from './OverviewMenu'; \ No newline at end of file diff --git a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx index ebf0ad48b..9222b8183 100644 --- a/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx +++ b/frontend/app/components/shared/SessionListContainer/components/SessionHeader/SessionHeader.tsx @@ -47,40 +47,24 @@ function SessionHeader(props: Props) { }; return ( -
-
-
- props.setActiveTab({ type: 'all' })} addBorder={activeTab === 'all'}> - SESSIONS - - props.setActiveTab({ type: 'bookmark' })} - addBorder={activeTab === 'bookmark'} - > - {`${isEnterprise ? 'VAULT' : 'BOOKMARKS'}`} - - props.setActiveTab({ type: 'notes' })} - > - NOTES - -
-
- - {activeTab !== 'notes' && activeTab !== 'bookmark' ? ( -
- -
- -
+
+ {activeTab !== 'notes' ? ( +
+ {activeTab !== 'bookmark' && ( + <> + +
+ +
+ + )}
) : null} {activeTab === 'notes' && ( -
+
)} diff --git a/frontend/app/components/ui/SVG.tsx b/frontend/app/components/ui/SVG.tsx index a2322c730..0318659d6 100644 --- a/frontend/app/components/ui/SVG.tsx +++ b/frontend/app/components/ui/SVG.tsx @@ -1,7 +1,7 @@ import React from 'react'; -export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 'arrow-counterclockwise' | 'arrow-down-short' | 'arrow-down' | 'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book-doc' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 
'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' | 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle-fill' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 
'flag-na' | 'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 
'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'pulse' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; +export type IconNames = 'activity' | 'alarm-clock' | 'alarm-plus' | 'all-sessions' | 'analytics' | 'anchor' | 'arrow-alt-square-right' | 'arrow-bar-left' | 'arrow-clockwise' | 
'arrow-counterclockwise' | 'arrow-down-short' | 'arrow-down' | 'arrow-repeat' | 'arrow-right-short' | 'arrow-square-left' | 'arrow-square-right' | 'arrow-up-short' | 'arrow-up' | 'arrows-angle-extend' | 'avatar/icn_bear' | 'avatar/icn_beaver' | 'avatar/icn_bird' | 'avatar/icn_bison' | 'avatar/icn_camel' | 'avatar/icn_chameleon' | 'avatar/icn_deer' | 'avatar/icn_dog' | 'avatar/icn_dolphin' | 'avatar/icn_elephant' | 'avatar/icn_fish' | 'avatar/icn_fox' | 'avatar/icn_gorilla' | 'avatar/icn_hippo' | 'avatar/icn_horse' | 'avatar/icn_hyena' | 'avatar/icn_kangaroo' | 'avatar/icn_lemur' | 'avatar/icn_mammel' | 'avatar/icn_monkey' | 'avatar/icn_moose' | 'avatar/icn_panda' | 'avatar/icn_penguin' | 'avatar/icn_porcupine' | 'avatar/icn_quail' | 'avatar/icn_rabbit' | 'avatar/icn_rhino' | 'avatar/icn_sea_horse' | 'avatar/icn_sheep' | 'avatar/icn_snake' | 'avatar/icn_squirrel' | 'avatar/icn_tapir' | 'avatar/icn_turtle' | 'avatar/icn_vulture' | 'avatar/icn_wild1' | 'avatar/icn_wild_bore' | 'ban' | 'bar-chart-line' | 'bar-pencil' | 'bell-fill' | 'bell-plus' | 'bell-slash' | 'bell' | 'binoculars' | 'book-doc' | 'book' | 'browser/browser' | 'browser/chrome' | 'browser/edge' | 'browser/electron' | 'browser/facebook' | 'browser/firefox' | 'browser/ie' | 'browser/opera' | 'browser/safari' | 'bullhorn' | 'business-time' | 'calendar-alt' | 'calendar-check' | 'calendar-day' | 'calendar' | 'call' | 'camera-alt' | 'camera-video-off' | 'camera-video' | 'camera' | 'card-checklist' | 'card-text' | 'caret-down-fill' | 'caret-left-fill' | 'caret-right-fill' | 'caret-up-fill' | 'chat-dots' | 'chat-right-text' | 'chat-square-quote' | 'check-circle-fill' | 'check-circle' | 'check' | 'chevron-double-left' | 'chevron-double-right' | 'chevron-down' | 'chevron-left' | 'chevron-right' | 'chevron-up' | 'circle-fill' | 'circle' | 'click-hesitation' | 'click-rage' | 'clipboard-list-check' | 'clock' | 'close' | 'cloud-fog2-fill' | 'code' | 'cog' | 'cogs' | 'collection' | 'columns-gap-filled' | 'columns-gap' 
| 'console/error' | 'console/exception' | 'console/info' | 'console/warning' | 'console' | 'controller' | 'cookies' | 'copy' | 'credit-card-front' | 'cross' | 'cubes' | 'cursor-trash' | 'dash' | 'dashboard-icn' | 'desktop' | 'device' | 'diagram-3' | 'dizzy' | 'door-closed' | 'doublecheck' | 'download' | 'drag' | 'edit' | 'ellipsis-v' | 'enter' | 'envelope' | 'errors-icon' | 'event/click' | 'event/click_hesitation' | 'event/clickrage' | 'event/code' | 'event/i-cursor' | 'event/input' | 'event/input_hesitation' | 'event/link' | 'event/location' | 'event/mouse_thrashing' | 'event/resize' | 'event/view' | 'exclamation-circle-fill' | 'exclamation-circle' | 'expand-wide' | 'explosion' | 'external-link-alt' | 'eye-slash-fill' | 'eye-slash' | 'eye' | 'fetch' | 'file-code' | 'file-medical-alt' | 'file-pdf' | 'file' | 'files' | 'filter' | 'filters/arrow-return-right' | 'filters/browser' | 'filters/click' | 'filters/clickrage' | 'filters/code' | 'filters/console' | 'filters/country' | 'filters/cpu-load' | 'filters/custom' | 'filters/device' | 'filters/dom-complete' | 'filters/duration' | 'filters/error' | 'filters/fetch-failed' | 'filters/fetch' | 'filters/file-code' | 'filters/graphql' | 'filters/i-cursor' | 'filters/input' | 'filters/lcpt' | 'filters/link' | 'filters/location' | 'filters/memory-load' | 'filters/metadata' | 'filters/os' | 'filters/perfromance-network-request' | 'filters/platform' | 'filters/referrer' | 'filters/resize' | 'filters/rev-id' | 'filters/state-action' | 'filters/ttfb' | 'filters/user-alt' | 'filters/userid' | 'filters/view' | 'flag-na' | 'folder-plus' | 'folder2' | 'fullscreen' | 'funnel/cpu-fill' | 'funnel/cpu' | 'funnel/dizzy' | 'funnel/emoji-angry-fill' | 'funnel/emoji-angry' | 'funnel/emoji-dizzy-fill' | 'funnel/exclamation-circle-fill' | 'funnel/exclamation-circle' | 'funnel/file-earmark-break-fill' | 'funnel/file-earmark-break' | 'funnel/file-earmark-minus-fill' | 'funnel/file-earmark-minus' | 'funnel/file-medical-alt' | 'funnel/file-x' | 
'funnel/hdd-fill' | 'funnel/hourglass-top' | 'funnel/image-fill' | 'funnel/image' | 'funnel/microchip' | 'funnel/mouse' | 'funnel/patch-exclamation-fill' | 'funnel/sd-card' | 'funnel-fill' | 'funnel-new' | 'funnel' | 'gear-fill' | 'gear' | 'geo-alt-fill-custom' | 'github' | 'graph-up-arrow' | 'graph-up' | 'grid-1x2' | 'grid-3x3' | 'grid-check' | 'grid-horizontal' | 'grid' | 'grip-horizontal' | 'hash' | 'hdd-stack' | 'headset' | 'heart-rate' | 'high-engagement' | 'history' | 'hourglass-start' | 'ic-errors' | 'ic-network' | 'ic-rage' | 'ic-resources' | 'id-card' | 'image' | 'info-circle-fill' | 'info-circle' | 'info-square' | 'info' | 'input-hesitation' | 'inspect' | 'integrations/assist' | 'integrations/bugsnag-text' | 'integrations/bugsnag' | 'integrations/cloudwatch-text' | 'integrations/cloudwatch' | 'integrations/datadog' | 'integrations/elasticsearch-text' | 'integrations/elasticsearch' | 'integrations/github' | 'integrations/graphql' | 'integrations/jira-text' | 'integrations/jira' | 'integrations/mobx' | 'integrations/newrelic-text' | 'integrations/newrelic' | 'integrations/ngrx' | 'integrations/openreplay-text' | 'integrations/openreplay' | 'integrations/redux' | 'integrations/rollbar-text' | 'integrations/rollbar' | 'integrations/segment' | 'integrations/sentry-text' | 'integrations/sentry' | 'integrations/slack-bw' | 'integrations/slack' | 'integrations/stackdriver' | 'integrations/sumologic-text' | 'integrations/sumologic' | 'integrations/teams-white' | 'integrations/teams' | 'integrations/vuejs' | 'journal-code' | 'layer-group' | 'lightbulb-on' | 'lightbulb' | 'link-45deg' | 'list-alt' | 'list-arrow' | 'list-ul' | 'list' | 'lock-alt' | 'magic' | 'map-marker-alt' | 'memory' | 'mic-mute' | 'mic' | 'minus' | 'mobile' | 'mouse-alt' | 'network' | 'next1' | 'no-dashboard' | 'no-metrics-chart' | 'no-metrics' | 'no-recordings' | 'os/android' | 'os/chrome_os' | 'os/fedora' | 'os/ios' | 'os/linux' | 'os/mac_os_x' | 'os/other' | 'os/ubuntu' | 'os/windows' | 'os' | 
'pause-fill' | 'pause' | 'pdf-download' | 'pencil-stop' | 'pencil' | 'percent' | 'performance-icon' | 'person-fill' | 'person' | 'pie-chart-fill' | 'pin-fill' | 'play-circle-bold' | 'play-circle-light' | 'play-circle' | 'play-fill-new' | 'play-fill' | 'play-hover' | 'play' | 'plus-circle' | 'plus-lg' | 'plus' | 'pointer-sessions-search' | 'prev1' | 'pulse' | 'puzzle-piece' | 'puzzle' | 'question-circle' | 'question-lg' | 'quote-left' | 'quote-right' | 'quotes' | 'record-circle' | 'redo-back' | 'redo' | 'remote-control' | 'replay-10' | 'resources-icon' | 'safe-fill' | 'safe' | 'sandglass' | 'search' | 'search_notification' | 'server' | 'share-alt' | 'shield-lock' | 'signpost-split' | 'signup' | 'skip-forward-fill' | 'skip-forward' | 'slack' | 'slash-circle' | 'sliders' | 'social/slack' | 'social/trello' | 'speedometer2' | 'spinner' | 'star-solid' | 'star' | 'step-forward' | 'stickies' | 'stop-record-circle' | 'stopwatch' | 'store' | 'sync-alt' | 'table-new' | 'table' | 'tablet-android' | 'tachometer-slow' | 'tachometer-slowest' | 'tags' | 'team-funnel' | 'telephone-fill' | 'telephone' | 'text-paragraph' | 'tools' | 'trash' | 'turtle' | 'user-alt' | 'user-circle' | 'user-friends' | 'users' | 'vendors/graphql' | 'vendors/mobx' | 'vendors/ngrx' | 'vendors/redux' | 'vendors/vuex' | 'web-vitals' | 'wifi' | 'window-alt' | 'window-restore' | 'window-x' | 'window' | 'zoom-in'; interface Props { name: IconNames; @@ -406,6 +406,7 @@ const SVG = (props: Props) => { case 'star-solid': return ; case 'star': return ; case 'step-forward': return ; + case 'stickies': return ; case 'stop-record-circle': return ; case 'stopwatch': return ; case 'store': return ; diff --git a/frontend/app/svg/icons/stickies.svg b/frontend/app/svg/icons/stickies.svg new file mode 100644 index 000000000..bf752b965 --- /dev/null +++ b/frontend/app/svg/icons/stickies.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file From a4a2089698ac3562b96b329636e1cced552b7c36 Mon Sep 17 00:00:00 2001 From: 
nick-delirium Date: Mon, 27 Mar 2023 12:55:11 +0200 Subject: [PATCH 60/60] change(ui): remove isjump, improve error logging for dommanager; improve healthstatus API display --- .../Header/HealthStatus/HealthStatus.tsx | 5 +- .../Header/HealthStatus/HealthWidget.tsx | 10 ++-- frontend/app/player/common/types.ts | 2 +- frontend/app/player/player/Animator.ts | 11 +++-- frontend/app/player/web/MessageManager.ts | 4 +- frontend/app/player/web/WebLivePlayer.ts | 2 +- .../app/player/web/managers/DOM/DOMManager.ts | 49 +++++++++---------- .../app/player/web/managers/PagesManager.ts | 4 +- frontend/app/services/HealthService.ts | 1 - 9 files changed, 46 insertions(+), 42 deletions(-) diff --git a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx index 317b36279..283ba6f21 100644 --- a/frontend/app/components/Header/HealthStatus/HealthStatus.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthStatus.tsx @@ -22,6 +22,7 @@ export interface IServiceStats { function HealthStatus() { const healthResponseSaved = localStorage.getItem(healthResponseKey) || '{}'; const [healthResponse, setHealthResponse] = React.useState(JSON.parse(healthResponseSaved)); + const [isError, setIsError] = React.useState(false); const [isLoading, setIsLoading] = React.useState(false); const lastAskedSaved = localStorage.getItem(lastAskedKey); const [lastAsked, setLastAsked] = React.useState(lastAskedSaved); @@ -36,6 +37,7 @@ function HealthStatus() { setLastAsked(asked.toString()); } catch (e) { console.error(e); + setIsError(true); } finally { setIsLoading(false); } @@ -51,7 +53,7 @@ function HealthStatus() { } }, []); - const icon = healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); + const icon = !isError && healthResponse?.overallHealth ? 'pulse' : ('exclamation-circle-fill' as const); return ( <>
@@ -71,6 +73,7 @@ function HealthStatus() { isLoading={isLoading} lastAsked={lastAsked} setShowModal={setShowModal} + isError={isError} />
{showModal ? ( diff --git a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx index 50b4de76d..6a184f854 100644 --- a/frontend/app/components/Header/HealthStatus/HealthWidget.tsx +++ b/frontend/app/components/Header/HealthStatus/HealthWidget.tsx @@ -10,12 +10,14 @@ function HealthWidget({ isLoading, lastAsked, setShowModal, + isError, }: { healthResponse: { overallHealth: boolean; healthMap: Record }; getHealth: Function; isLoading: boolean; lastAsked: string | null; setShowModal: (visible: boolean) => void; + isError?: boolean; }) { const [lastAskedDiff, setLastAskedDiff] = React.useState(0); const healthOk = healthResponse?.overallHealth; @@ -28,8 +30,8 @@ function HealthWidget({ setLastAskedDiff(diffInMinutes); }, [lastAsked]); - const title = healthOk ? 'All Systems Operational' : 'Service disruption'; - const icon = healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); + const title = !isError && healthOk ? 'All Systems Operational' : 'Service disruption'; + const icon = !isError && healthOk ? ('check-circle-fill' as const) : ('exclamation-circle-fill' as const); const problematicServices = Object.values(healthResponse?.healthMap || {}).filter( (service: Record) => !service.healthOk @@ -65,10 +67,12 @@ function HealthWidget({
+ {isError &&
Error getting service health status
} +
- {!healthOk ? ( + {!isError && !healthOk ? ( <>
Observed installation Issue with the following diff --git a/frontend/app/player/common/types.ts b/frontend/app/player/common/types.ts index 7df4f6f6b..308ec0659 100644 --- a/frontend/app/player/common/types.ts +++ b/frontend/app/player/common/types.ts @@ -7,7 +7,7 @@ export interface Indexed { } export interface Moveable { - move(time: number, isJump?: boolean): void + move(time: number): void } export interface Cleanable { diff --git a/frontend/app/player/player/Animator.ts b/frontend/app/player/player/Animator.ts index 9423b5785..55d38432c 100644 --- a/frontend/app/player/player/Animator.ts +++ b/frontend/app/player/player/Animator.ts @@ -1,4 +1,5 @@ import type { Store, Moveable, Interval } from '../common/types'; +import MessageManager from 'App/player/web/MessageManager' const fps = 60 const performance: { now: () => number } = window.performance || { now: Date.now.bind(Date) } @@ -54,18 +55,18 @@ export default class Animator { private animationFrameRequestId: number = 0 - constructor(private store: Store, private mm: Moveable) { + constructor(private store: Store, private mm: MessageManager) { // @ts-ignore window.playerJump = this.jump.bind(this) } - private setTime(time: number, isJump?: boolean) { + private setTime(time: number) { this.store.update({ time, completed: false, }) - this.mm.move(time, isJump) + this.mm.move(time) } private startAnimation() { @@ -183,11 +184,11 @@ export default class Animator { jump = (time: number) => { if (this.store.get().playing) { cancelAnimationFrame(this.animationFrameRequestId) - this.setTime(time, true) + this.setTime(time) this.startAnimation() this.store.update({ livePlay: time === this.store.get().endTime }) } else { - this.setTime(time, true) + this.setTime(time) this.store.update({ livePlay: time === this.store.get().endTime }) } } diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 2772ccbd2..b343eef96 100644 --- a/frontend/app/player/web/MessageManager.ts 
+++ b/frontend/app/player/web/MessageManager.ts @@ -289,7 +289,7 @@ export default class MessageManager { this.activityManager = new ActivityManager(this.session.duration.milliseconds); } - move(t: number, isJump?: boolean, index?: number): void { + move(t: number, index?: number): void { const stateToUpdate: Partial = {}; /* == REFACTOR_ME == */ const lastLoadedLocationMsg = this.loadedLocationManager.moveGetLast(t, index); @@ -339,7 +339,7 @@ export default class MessageManager { if (!!lastResize) { this.setSize(lastResize) } - this.pagesManager.moveReady(t, isJump).then(() => { + this.pagesManager.moveReady(t).then(() => { const lastScroll = this.scrollManager.moveGetLast(t, index); if (!!lastScroll && this.screen.window) { diff --git a/frontend/app/player/web/WebLivePlayer.ts b/frontend/app/player/web/WebLivePlayer.ts index 709692d20..7ed1e3400 100644 --- a/frontend/app/player/web/WebLivePlayer.ts +++ b/frontend/app/player/web/WebLivePlayer.ts @@ -56,7 +56,7 @@ export default class WebLivePlayer extends WebPlayer { const bytes = await requestEFSDom(this.session.sessionId) const fileReader = new MFileReader(bytes, this.session.startedAt) for (let msg = fileReader.readNext();msg !== null;msg = fileReader.readNext()) { - this.messageManager.distributeMessage(msg, msg._index) + this.messageManager.distributeMessage(msg) } this.wpState.update({ liveTimeTravel: true, diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index a5c2f2c2f..d54781028 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -142,7 +142,7 @@ export default class DOMManager extends ListWalker { private setNodeAttribute(msg: { id: number, name: string, value: string }) { let { name, value } = msg; const vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetNodeAttribute: Node not found", msg); 
return } if (vn.node.tagName === "INPUT" && name === "name") { // Otherwise binds local autocomplete values (maybe should ignore on the tracker level) @@ -169,7 +169,7 @@ export default class DOMManager extends ListWalker { this.removeBodyScroll(msg.id, vn) } - private applyMessage = (msg: Message, isJump?: boolean): Promise | undefined => { + private applyMessage = (msg: Message): Promise | undefined => { let vn: VNode | undefined let doc: Document | null let styleSheet: CSSStyleSheet | PostponedStyleSheet | undefined @@ -230,14 +230,14 @@ export default class DOMManager extends ListWalker { return case MType.RemoveNode: vn = this.vElements.get(msg.id) || this.vTexts.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } - if (!vn.parentNode) { logger.error("Parent node not found", msg); return } + if (!vn) { logger.error("RemoveNode: Node not found", msg); return } + if (!vn.parentNode) { logger.error("RemoveNode: Parent node not found", msg); return } vn.parentNode.removeChild(vn) this.vElements.delete(msg.id) this.vTexts.delete(msg.id) return case MType.SetNodeAttribute: - if (isJump && msg.name === 'href') this.attrsBacktrack.push(msg) + if (msg.name === 'href') this.attrsBacktrack.push(msg) else this.setNodeAttribute(msg) return case MType.StringDict: @@ -247,7 +247,7 @@ export default class DOMManager extends ListWalker { this.stringDict[msg.nameKey] === undefined && logger.error("No dictionary key for msg 'name': ", msg) this.stringDict[msg.valueKey] === undefined && logger.error("No dictionary key for msg 'value': ", msg) if (this.stringDict[msg.nameKey] === undefined || this.stringDict[msg.valueKey] === undefined ) { return } - if (isJump && this.stringDict[msg.nameKey] === 'href') this.attrsBacktrack.push(msg) + if (this.stringDict[msg.nameKey] === 'href') this.attrsBacktrack.push(msg) else { this.setNodeAttribute({ id: msg.id, @@ -258,12 +258,12 @@ export default class DOMManager extends ListWalker { return case 
MType.RemoveNodeAttribute: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("RemoveNodeAttribute: Node not found", msg); return } vn.removeAttribute(msg.name) return case MType.SetInputValue: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetInputValue: Node not found", msg); return } const nodeWithValue = vn.node if (!(nodeWithValue instanceof HTMLInputElement || nodeWithValue instanceof HTMLTextAreaElement @@ -283,13 +283,13 @@ export default class DOMManager extends ListWalker { return case MType.SetInputChecked: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetInputChecked: Node not found", msg); return } (vn.node as HTMLInputElement).checked = msg.checked return case MType.SetNodeData: case MType.SetCssData: // mbtodo: remove css transitions when timeflow is not natural (on jumps) vn = this.vTexts.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("SetCssData: Node not found", msg); return } vn.setData(msg.data) if (vn.node instanceof HTMLStyleElement) { doc = this.screen.document @@ -304,7 +304,7 @@ export default class DOMManager extends ListWalker { // @deprecated since 4.0.2 in favor of adopted_ss_insert/delete_rule + add_owner as being common case for StyleSheets case MType.CssInsertRule: vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("CssInsertRule: Node not found", msg); return } if (!(vn instanceof VStyleElement)) { logger.warn("Non-style node in CSS rules message (or sheet is null)", msg, vn); return @@ -313,7 +313,7 @@ export default class DOMManager extends ListWalker { return case MType.CssDeleteRule: vn = this.vElements.get(msg.id) - if (!vn) { 
logger.error("CssDeleteRule: Node not found", msg); return } if (!(vn instanceof VStyleElement)) { logger.warn("Non-style node in CSS rules message (or sheet is null)", msg, vn); return @@ -324,7 +324,7 @@ export default class DOMManager extends ListWalker { case MType.CreateIFrameDocument: vn = this.vElements.get(msg.frameID) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("CreateIFrameDocument: Node not found", msg); return } vn.enforceInsertion() const host = vn.node if (host instanceof HTMLIFrameElement) { @@ -384,7 +384,7 @@ export default class DOMManager extends ListWalker { if (!vn) { // non-constructed case vn = this.vElements.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("AdoptedSsAddOwner: Node not found", msg); return } if (!(vn instanceof VStyleElement)) { logger.error("Non-style owner", msg); return } this.ppStyleSheets.set(msg.sheetID, new PostponedStyleSheet(vn.node)) return @@ -411,13 +411,13 @@ export default class DOMManager extends ListWalker { return } vn = this.vRoots.get(msg.id) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("AdoptedSsRemoveOwner: Node not found", msg); return } //@ts-ignore vn.node.adoptedStyleSheets = [...vn.node.adoptedStyleSheets].filter(s => s !== styleSheet) return case MType.LoadFontFace: vn = this.vRoots.get(msg.parentID) - if (!vn) { logger.error("Node not found", msg); return } + if (!vn) { logger.error("LoadFontFace: Node not found", msg); return } if (vn instanceof VShadowRoot) { logger.error(`Node ${vn} expected to be a Document`, msg); return } let descr: Object try { @@ -460,7 +460,7 @@ export default class DOMManager extends ListWalker { } } - async moveReady(t: number, isJump?: boolean): Promise { + async moveReady(t: number): Promise { // MBTODO (back jump optimisation): // - store intemediate virtual dom state // - cancel previous moveReady tasks (is it possible?) 
if new timestamp is less @@ -474,16 +474,13 @@ export default class DOMManager extends ListWalker { * */ // http://0.0.0.0:3333/5/session/8452905874437457 // 70 iframe, 8 create element - STYLE tag - await this.moveWait(t, (msg) => { - this.applyMessage(msg, isJump) - }) + await this.moveWait(t, this.applyMessage) + + this.attrsBacktrack.forEach(msg => { + this.applyBacktrack(msg) + }) + this.attrsBacktrack = [] - if (isJump) { - this.attrsBacktrack.forEach(msg => { - this.applyBacktrack(msg) - }) - this.attrsBacktrack = [] - } this.vRoots.forEach(rt => rt.applyChanges()) // MBTODO (optimisation): affected set // Thinkabout (read): css preload // What if we go back before it is ready? We'll have two handlres? diff --git a/frontend/app/player/web/managers/PagesManager.ts b/frontend/app/player/web/managers/PagesManager.ts index b30f40372..dbc64bb72 100644 --- a/frontend/app/player/web/managers/PagesManager.ts +++ b/frontend/app/player/web/managers/PagesManager.ts @@ -33,14 +33,14 @@ export default class PagesManager extends ListWalker { this.forEach(page => page.sort(comparator)) } - moveReady(t: number, isJump?: boolean): Promise { + moveReady(t: number): Promise { const requiredPage = this.moveGetLast(t) if (requiredPage != null) { this.currentPage = requiredPage this.currentPage.reset() // Otherwise it won't apply create_document } if (this.currentPage != null) { - return this.currentPage.moveReady(t, isJump) + return this.currentPage.moveReady(t) } return Promise.resolve() } diff --git a/frontend/app/services/HealthService.ts b/frontend/app/services/HealthService.ts index 019863bb3..7d2b3cc7f 100644 --- a/frontend/app/services/HealthService.ts +++ b/frontend/app/services/HealthService.ts @@ -5,6 +5,5 @@ export default class HealthService extends BaseService { return this.client.get('/health') .then(r => r.json()) .then(j => j.data || {}) - .catch(Promise.reject) } } \ No newline at end of file