From a29c02b43abf0054c1a6f33918a22cc60f24dc4b Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Thu, 16 Dec 2021 19:10:12 +0100 Subject: [PATCH] Api FastApi (#252) * feat(api): dynamic-api 1/2 * feat(api): dynamic-api 2/2 feat(api): core-api 1/2 * feat(api): changed schemas feat(api): aipkey authorizer feat(api): jwt authorizer payload feat(api): core-api 2/3 * feat(api): apikey authorizer feat(api): shared context feat(api): response editor feat(api): middleware feat(api): custom router feat(api): fix auth double call * feat(api): dashboard feat(api): insights feat(api): public api v1 * feat(api): allow full CORS * feat(api): use decouple-config instead of env feat(api): fixed conflict slack endpoint feat(api): fixed favorite errors param * feat(api): migration fixes * feat(api): changes * feat(api): crons * feat(api): changes and fixes * feat(api): added new endpoints feat(api): applied new changes feat(api): Docker image * feat(api): EE 1/4 * feat(api): EE core_dynamic * feat(api): global routers generator * feat(api): project authorizer feat(api): docker image feat(api): crons * feat(api): EE trace activity * feat(api): changed ORRouter * feat(api): EE trace activity parameters&payload * feat(api): EE trace activity action name & path_format * feat(db): user trace * feat(api): EE trace activity ignore routes and hide attribute feat(api): fix funnel payload schema * feat(api): mobile support * feat(api): changed build script * feat(api): changed mobile sign endpoint feat(api): changed requirements.txt * feat(api): changed dockerfile * feat(api): changed mobile-env-var * feat(api): removed insights * feat(api): changed EE Dockerfile * feat(api): cast session_id to str for signing * feat(api): fixed error_id type * feat(api): fixed /errors priority conflict * feat(api): fixed /errors/{errorId} default params * feat(api): fixed change password after invitation * feat(api): use background task for emails instead of low-timeout-api feat(api): EE fixed missing 
required params * feat(api): funnel-insights payload change * feat(api): funnel-insights payload change * feat(api): changed edit user payload schema * feat(api): changed metrics payload schema * feat(api): changed metrics payload schema * feat(api): changed edit user default values feat(api): fixed change error status route * feat(api): changed edit user * feat(api): stop user from changing his own role * feat(api): changed add slack * feat(api): changed get funnel * feat(api): changed get funnel on the fly payload feat(api): changed update payload * feat(api): changed get funnel on the fly payload * feat(api): changed update funnel payload * feat(api): changed get funnel-sessions/issues on the fly payload * feat(api): fixed funnel missing rangeValue * feat(api): fixes * feat(api): iceServers configuration * feat(api): fix issueId casting * feat(api): changed issues-sessions endpoint payload-schema * feat(api): EE changed traces-ignored-routes * feat(api): EE include core sessions.py * feat(api): EE check licence on every request if expired * feat(api): move general stats to dynamic * feat(api): code cleanup feat(api): removed sentry * feat(api): changed traces-ignore-routes * feat(api): changed dependencies * feat(api): changed jwt-auth-response code * feat(api): changed traces-ignore-routes * feat(api): changed traces-ignore-routes * feat(api): removed PyTZ feat(api): migrated time-helper to zoneinfo * feat(api): EE added missing dependency feat(api): changed base docker image * feat(api): merge after roles * feat(api): EE roles fastapi * feat(db): handel HTTPExceptions * feat(db): changed payload schema * feat(db): changed payload schema * feat(api): included insights * feat(api): removed unused helper * feat(api): merge from dev to fatsapi * feat(api): merge fixes feat(api): SAML migration * feat(api): changed GET /signup response feat(api): changed EE Dockerfile * feat(api): changed edition detection * feat(api): include ee endpoints * feat(api): add/edit 
member changes * feat(api): saml changed redirect * feat(api): track session's replay feat(api): track error's details * feat(api): ignore tracking for read roles * feat(api): define global queue feat(api): define global scheduler feat(api): traces use queue feat(api): traces batch insert feat(DB): changed traces schema * feat(api): fix signup captcha * feat(api): fix signup captcha * feat(api): optional roleId feat(api): set roleId to member if None * feat(api): fixed edit role * feat(api): return role details when creating a new member * feat(api): trace: use BackgroundTasks instead of BackgroundTask to not override previous tasks * feat(api): trace: use BackgroundTask if no other background task is defined * feat(api): optimised delete metadata * feat(api): Notification optional message * feat(api): fix background-task reference * feat(api): fix trace-background-task * feat(api): fixed g-captcha for reset password * feat(api): fix edit self-user * feat(api): fixed create github-issue * feat(api): set misfire_grace_time for crons * feat(api): removed chalice feat(api): freeze dependencies * feat(api): refactored blueprints * feat(api): /metadata/session_search allow projectId=None * feat(api): public API, changed userId type * feat(api): fix upload sourcemaps * feat(api): user-trace support ApiKey endpoints * feat(api): fixed user-trace foreign key type * feat(api): fixed trace schema * feat(api): trace save auth-method * feat(api): trace fixed auth-method * feat(api): trace changed schema --- api/.env.default | 45 + api/Dockerfile | 7 +- api/app.py | 133 +- .../blueprints => auth}/__init__.py | 0 api/auth/auth_apikey.py | 28 + api/auth/auth_jwt.py | 39 + api/build.sh | 8 +- api/chalicelib/_overrides.py | 104 -- api/chalicelib/blueprints/app/v1_api.py | 127 -- api/chalicelib/blueprints/bp_authorizers.py | 37 - api/chalicelib/blueprints/bp_core.py | 909 -------------- api/chalicelib/blueprints/bp_core_crons.py | 18 - api/chalicelib/blueprints/bp_core_dynamic.py | 
460 ------- .../blueprints/bp_core_dynamic_crons.py | 13 - .../blueprints/subs/bp_dashboard.py | 550 --------- api/chalicelib/core/alerts.py | 19 +- api/chalicelib/core/announcements.py | 4 +- api/chalicelib/core/assist.py | 15 +- api/chalicelib/core/authorizers.py | 17 +- api/chalicelib/core/collaboration_slack.py | 10 +- api/chalicelib/core/errors.py | 2 +- api/chalicelib/core/funnels.py | 8 +- api/chalicelib/core/insights.py | 932 ++++++++++++++ .../core/integration_github_issue.py | 2 +- api/chalicelib/core/jobs.py | 2 +- api/chalicelib/core/metadata.py | 4 +- api/chalicelib/core/mobile.py | 4 +- api/chalicelib/core/projects.py | 10 +- api/chalicelib/core/reset_password.py | 13 +- api/chalicelib/core/sessions.py | 4 +- api/chalicelib/core/sessions_assignments.py | 4 +- api/chalicelib/core/sessions_mobs.py | 15 +- api/chalicelib/core/signup.py | 29 +- api/chalicelib/core/slack.py | 6 +- api/chalicelib/core/socket_ios.py | 6 +- api/chalicelib/core/sourcemaps.py | 12 +- api/chalicelib/core/sourcemaps_parser.py | 14 +- api/chalicelib/core/tenants.py | 13 +- api/chalicelib/core/users.py | 51 +- api/chalicelib/core/webhook.py | 8 +- api/chalicelib/core/weekly_report.py | 4 +- api/chalicelib/utils/TimeUTC.py | 39 +- api/chalicelib/utils/captcha.py | 4 +- api/chalicelib/utils/email_handler.py | 8 +- api/chalicelib/utils/helper.py | 59 +- api/chalicelib/utils/pg_client.py | 12 +- api/chalicelib/utils/s3.py | 34 +- api/chalicelib/utils/s3urls.py | 120 -- api/chalicelib/utils/smtp.py | 20 +- api/entrypoint.sh | 3 +- api/env_handler.py | 13 - api/or_dependencies.py | 43 + api/requirements.txt | 12 +- .../blueprints/app => routers}/__init__.py | 0 .../subs => routers/app}/__init__.py | 0 api/routers/app/v1_api.py | 120 ++ api/routers/base.py | 12 + api/routers/core.py | 1097 +++++++++++++++++ api/routers/core_dynamic.py | 218 ++++ .../routers/crons}/__init__.py | 0 api/routers/crons/core_crons.py | 15 + api/routers/crons/core_dynamic_crons.py | 10 + 
.../routers}/subs/__init__.py | 0 api/routers/subs/dashboard.py | 346 ++++++ api/routers/subs/insights.py | 108 ++ api/run-dev.sh | 3 + api/schemas.py | 379 ++++++ ee/api/.env.default | 54 + ee/api/.gitignore | 26 +- ee/api/Dockerfile | 7 +- ee/api/app.py | 168 +-- ee/api/auth/__init__.py | 0 ee/api/auth/auth_project.py | 25 + ee/api/chalicelib/_overrides.py | 104 -- ee/api/chalicelib/blueprints/app/v1_api_ee.py | 16 - .../chalicelib/blueprints/bp_authorizers.py | 38 - .../chalicelib/blueprints/bp_core_dynamic.py | 470 ------- .../blueprints/bp_core_dynamic_crons.py | 21 - ee/api/chalicelib/blueprints/bp_ee.py | 58 - ee/api/chalicelib/blueprints/bp_ee_crons.py | 6 - .../blueprints/subs/bp_dashboard.py | 606 --------- ee/api/chalicelib/core/authorizers.py | 16 +- ee/api/chalicelib/core/insights.py | 1047 ++++++++++++++++ ee/api/chalicelib/core/license.py | 7 +- ee/api/chalicelib/core/metadata.py | 3 +- ee/api/chalicelib/core/projects.py | 31 +- ee/api/chalicelib/core/reset_password.py | 11 +- ee/api/chalicelib/core/signup.py | 33 +- ee/api/chalicelib/core/tenants.py | 4 +- ee/api/chalicelib/core/traces.py | 157 +++ ee/api/chalicelib/core/unlock.py | 15 +- ee/api/chalicelib/core/users.py | 54 +- ee/api/chalicelib/core/webhook.py | 5 +- ee/api/chalicelib/utils/SAML2_helper.py | 47 +- ee/api/chalicelib/utils/assist_helper.py | 9 +- ee/api/chalicelib/utils/ch_client.py | 6 +- ee/api/or_dependencies.py | 45 + ee/api/prepare-local.sh | 2 + ee/api/requirements.txt | 15 +- ee/api/routers/app/v1_api_ee.py | 12 + ee/api/routers/base.py | 14 + ee/api/routers/core_dynamic.py | 222 ++++ ee/api/routers/crons/core_dynamic_crons.py | 18 + ee/api/routers/ee.py | 60 + .../blueprints/bp_saml.py => routers/saml.py} | 73 +- ee/api/run-dev.sh | 3 + ee/api/schemas_ee.py | 19 + .../db/init_dbs/postgresql/1.5.0/1.5.0.sql | 18 + .../db/init_dbs/postgresql/init_schema.sql | 17 + 109 files changed, 5664 insertions(+), 4259 deletions(-) create mode 100644 api/.env.default rename 
api/{chalicelib/blueprints => auth}/__init__.py (100%) create mode 100644 api/auth/auth_apikey.py create mode 100644 api/auth/auth_jwt.py delete mode 100644 api/chalicelib/_overrides.py delete mode 100644 api/chalicelib/blueprints/app/v1_api.py delete mode 100644 api/chalicelib/blueprints/bp_authorizers.py delete mode 100644 api/chalicelib/blueprints/bp_core.py delete mode 100644 api/chalicelib/blueprints/bp_core_crons.py delete mode 100644 api/chalicelib/blueprints/bp_core_dynamic.py delete mode 100644 api/chalicelib/blueprints/bp_core_dynamic_crons.py delete mode 100644 api/chalicelib/blueprints/subs/bp_dashboard.py create mode 100644 api/chalicelib/core/insights.py delete mode 100644 api/chalicelib/utils/s3urls.py delete mode 100644 api/env_handler.py create mode 100644 api/or_dependencies.py rename api/{chalicelib/blueprints/app => routers}/__init__.py (100%) rename api/{chalicelib/blueprints/subs => routers/app}/__init__.py (100%) create mode 100644 api/routers/app/v1_api.py create mode 100644 api/routers/base.py create mode 100644 api/routers/core.py create mode 100644 api/routers/core_dynamic.py rename {ee/api/chalicelib/blueprints => api/routers/crons}/__init__.py (100%) create mode 100644 api/routers/crons/core_crons.py create mode 100644 api/routers/crons/core_dynamic_crons.py rename {ee/api/chalicelib/blueprints => api/routers}/subs/__init__.py (100%) create mode 100644 api/routers/subs/dashboard.py create mode 100644 api/routers/subs/insights.py create mode 100755 api/run-dev.sh create mode 100644 api/schemas.py create mode 100644 ee/api/.env.default create mode 100644 ee/api/auth/__init__.py create mode 100644 ee/api/auth/auth_project.py delete mode 100644 ee/api/chalicelib/_overrides.py delete mode 100644 ee/api/chalicelib/blueprints/app/v1_api_ee.py delete mode 100644 ee/api/chalicelib/blueprints/bp_authorizers.py delete mode 100644 ee/api/chalicelib/blueprints/bp_core_dynamic.py delete mode 100644 
ee/api/chalicelib/blueprints/bp_core_dynamic_crons.py delete mode 100644 ee/api/chalicelib/blueprints/bp_ee.py delete mode 100644 ee/api/chalicelib/blueprints/bp_ee_crons.py delete mode 100644 ee/api/chalicelib/blueprints/subs/bp_dashboard.py create mode 100644 ee/api/chalicelib/core/insights.py create mode 100644 ee/api/chalicelib/core/traces.py create mode 100644 ee/api/or_dependencies.py create mode 100755 ee/api/prepare-local.sh create mode 100644 ee/api/routers/app/v1_api_ee.py create mode 100644 ee/api/routers/base.py create mode 100644 ee/api/routers/core_dynamic.py create mode 100644 ee/api/routers/crons/core_dynamic_crons.py create mode 100644 ee/api/routers/ee.py rename ee/api/{chalicelib/blueprints/bp_saml.py => routers/saml.py} (74%) create mode 100755 ee/api/run-dev.sh create mode 100644 ee/api/schemas_ee.py create mode 100644 ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql diff --git a/api/.env.default b/api/.env.default new file mode 100644 index 000000000..2e9101b60 --- /dev/null +++ b/api/.env.default @@ -0,0 +1,45 @@ +EMAIL_FROM=OpenReplay +EMAIL_HOST= +EMAIL_PASSWORD= +EMAIL_PORT=587 +EMAIL_SSL_CERT= +EMAIL_SSL_KEY= +EMAIL_USER= +EMAIL_USE_SSL=false +EMAIL_USE_TLS=true +S3_HOST= +S3_KEY= +S3_SECRET= +SITE_URL= +alert_ntf=http://127.0.0.1:8000/async/alerts/notifications/%s +announcement_url= +assign_link=http://127.0.0.1:8000/async/email_assignment +async_Token= +captcha_key= +captcha_server= +change_password_link=/reset-password?invitation=%s&&pass=%s +email_basic=http://127.0.0.1:8000/async/basic/%s +email_funnel=http://127.0.0.1:8000/async/funnel/%s +email_signup=http://127.0.0.1:8000/async/email_signup/%s +invitation_link=/api/users/invitation?token=%s +isEE=false +isFOS=true +js_cache_bucket=sessions-assets +jwt_algorithm=HS512 +jwt_exp_delta_seconds=2592000 +jwt_issuer=openreplay-default-foss +jwt_secret="SET A RANDOM STRING HERE" +peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers +pg_dbname=postgres 
+pg_host=postgresql.db.svc.cluster.local +pg_password=asayerPostgres +pg_port=5432 +pg_user=postgres +put_S3_TTL=20 +sentryURL= +sessions_bucket=mobs +sessions_region=us-east-1 +sourcemaps_bucket=sourcemaps +sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps +stage=default-foss +version_number=1.4.0 \ No newline at end of file diff --git a/api/Dockerfile b/api/Dockerfile index c9c0eaf2c..a8d6210a0 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,9 +1,10 @@ -FROM python:3.6-slim +FROM python:3.9.7-slim LABEL Maintainer="Rajesh Rajendran" +LABEL Maintainer="KRAIEM Taha Yassine" WORKDIR /work COPY . . -RUN pip install -r requirements.txt -t ./vendor --upgrade -RUN pip install chalice==1.22.2 +RUN pip install -r requirements.txt +RUN mv .env.default .env # Add Tini # Startup daemon diff --git a/api/app.py b/api/app.py index e67810de5..47b200aef 100644 --- a/api/app.py +++ b/api/app.py @@ -1,109 +1,64 @@ -import sentry_sdk -from chalice import Chalice, Response -from sentry_sdk import configure_scope +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from fastapi import FastAPI, Request +from fastapi.middleware.cors import CORSMiddleware +from starlette.responses import StreamingResponse -from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers -from chalicelib.blueprints import bp_core, bp_core_crons -from chalicelib.blueprints.app import v1_api -from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons -from chalicelib.blueprints.subs import bp_dashboard from chalicelib.utils import helper from chalicelib.utils import pg_client -from chalicelib.utils.helper import environ +from routers import core, core_dynamic +from routers.app import v1_api +from routers.crons import core_crons +from routers.crons import core_dynamic_crons +from routers.subs import dashboard -app = Chalice(app_name='parrot') -app.debug = not helper.is_production() or helper.is_local() - 
-sentry_sdk.init(environ["sentryURL"]) - -# Monkey-patch print for DataDog hack -import sys -import traceback - -old_tb = traceback.print_exception -old_f = sys.stdout -old_e = sys.stderr -OR_SESSION_TOKEN = None - - -class F: - def write(self, x): - if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local(): - old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}") - else: - old_f.write(x) - - def flush(self): - pass - - -def tb_print_exception(etype, value, tb, limit=None, file=None, chain=True): - if OR_SESSION_TOKEN is not None and not helper.is_local(): - value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value)) - - old_tb(etype, value, tb, limit, file, chain) - - -if helper.is_production(): - traceback.print_exception = tb_print_exception - -sys.stdout = F() -sys.stderr = F() -# ---End Monkey-patch - - -_overrides.chalice_app(app) +app = FastAPI() @app.middleware('http') -def or_middleware(event, get_response): +async def or_middleware(request: Request, call_next): global OR_SESSION_TOKEN - OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid', - app.current_request.headers.get('vnd.asayer.io.sid')) - if "authorizer" in event.context and event.context["authorizer"] is None: - print("Deleted user!!") - pg_client.close() - return Response(body={"errors": ["Deleted user"]}, status_code=403) + OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid')) try: if helper.TRACK_TIME: import time now = int(time.time() * 1000) - response = get_response(event) - - if response.status_code == 200 and response.body is not None and response.body.get("errors") is not None: - if "not found" in response.body["errors"][0]: - response = Response(status_code=404, body=response.body) - else: - response = Response(status_code=400, body=response.body) - if response.status_code // 100 == 5 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): - with 
configure_scope() as scope: - scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) - scope.set_extra("context", event.context) - sentry_sdk.capture_exception(Exception(response.body)) + response: StreamingResponse = await call_next(request) if helper.TRACK_TIME: print(f"Execution time: {int(time.time() * 1000) - now} ms") except Exception as e: - if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): - with configure_scope() as scope: - scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) - scope.set_extra("context", event.context) - sentry_sdk.capture_exception(e) - response = Response(body={"Code": "InternalServerError", - "Message": "An internal server error occurred [level=Fatal]."}, - status_code=500) + pg_client.close() + raise e pg_client.close() return response -# Open source -app.register_blueprint(bp_authorizers.app) -app.register_blueprint(bp_core.app) -app.register_blueprint(bp_core_crons.app) -app.register_blueprint(bp_core_dynamic.app) -app.register_blueprint(bp_core_dynamic_crons.app) -app.register_blueprint(bp_dashboard.app) -app.register_blueprint(v1_api.app) +origins = [ + "*", +] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) +app.include_router(core.public_app) +app.include_router(core.app) +app.include_router(core.app_apikey) +app.include_router(core_dynamic.public_app) +app.include_router(core_dynamic.app) +app.include_router(core_dynamic.app_apikey) +app.include_router(dashboard.app) +# app.include_router(insights.app) +app.include_router(v1_api.app_apikey) + +Schedule = AsyncIOScheduler() +Schedule.start() + +for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + Schedule.add_job(id=job["func"].__name__, **job) + +# for job in Schedule.get_jobs(): +# print({"Name": str(job.id), "Run Frequency": str(job.trigger), 
"Next Run": str(job.next_run_time)}) diff --git a/api/chalicelib/blueprints/__init__.py b/api/auth/__init__.py similarity index 100% rename from api/chalicelib/blueprints/__init__.py rename to api/auth/__init__.py diff --git a/api/auth/auth_apikey.py b/api/auth/auth_apikey.py new file mode 100644 index 000000000..9e385a993 --- /dev/null +++ b/api/auth/auth_apikey.py @@ -0,0 +1,28 @@ +from typing import Optional + +from fastapi import Request +from fastapi.security import APIKeyHeader +from starlette import status +from starlette.exceptions import HTTPException + +from chalicelib.core import authorizers +from schemas import CurrentAPIContext + + +class APIKeyAuth(APIKeyHeader): + def __init__(self, auto_error: bool = True): + super(APIKeyAuth, self).__init__(name="Authorization", auto_error=auto_error) + + async def __call__(self, request: Request) -> Optional[CurrentAPIContext]: + api_key: Optional[str] = await super(APIKeyAuth, self).__call__(request) + r = authorizers.api_key_authorizer(api_key) + if r is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid API Key", + ) + r["authorizer_identity"] = "api_key" + print(r) + request.state.authorizer_identity = "api_key" + request.state.currentContext = CurrentAPIContext(tenant_id=r["tenantId"]) + return request.state.currentContext diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py new file mode 100644 index 000000000..bf6c1901b --- /dev/null +++ b/api/auth/auth_jwt.py @@ -0,0 +1,39 @@ +from typing import Optional + +from fastapi import Request +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from starlette import status +from starlette.exceptions import HTTPException + +from chalicelib.core import authorizers, users +from schemas import CurrentContext + + +class JWTAuth(HTTPBearer): + def __init__(self, auto_error: bool = True): + super(JWTAuth, self).__init__(auto_error=auto_error) + + async def __call__(self, request: Request) -> 
Optional[CurrentContext]: + credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request) + if credentials: + if not credentials.scheme == "Bearer": + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") + jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + if jwt_payload is None \ + or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ + or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], + jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") + user = users.get(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"]) + if user is None: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") + jwt_payload["authorizer_identity"] = "jwt" + print(jwt_payload) + request.state.authorizer_identity = "jwt" + request.state.currentContext = CurrentContext(tenant_id=jwt_payload["tenantId"], + user_id=jwt_payload["userId"], + email=user["email"]) + return request.state.currentContext + + else: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authorization code.") diff --git a/api/build.sh b/api/build.sh index d66a54ab9..ee7cec3ab 100644 --- a/api/build.sh +++ b/api/build.sh @@ -26,11 +26,11 @@ function build_api(){ envarg="default-ee" tag="ee-" } - docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/chalice:${git_sha1} . + docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/api:${git_sha1} . 
[[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest - docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest + docker push ${DOCKER_REPO:-'local'}/api:${git_sha1} + docker tag ${DOCKER_REPO:-'local'}/api:${git_sha1} ${DOCKER_REPO:-'local'}/api:${tag}latest + docker push ${DOCKER_REPO:-'local'}/api:${tag}latest } } diff --git a/api/chalicelib/_overrides.py b/api/chalicelib/_overrides.py deleted file mode 100644 index 2bf0b6d2a..000000000 --- a/api/chalicelib/_overrides.py +++ /dev/null @@ -1,104 +0,0 @@ -from chalice import Chalice, CORSConfig -from chalicelib.blueprints import bp_authorizers -from chalicelib.core import authorizers - -import sched -import threading -import time -from datetime import datetime -import pytz -from croniter import croniter - -base_time = datetime.now(pytz.utc) - -cors_config = CORSConfig( - allow_origin='*', - allow_headers=['vnd.openreplay.com.sid', 'vnd.asayer.io.sid'], - # max_age=600, - # expose_headers=['X-Special-Header'], - allow_credentials=True -) - - -def chalice_app(app): - def app_route(self, path, **kwargs): - kwargs.setdefault('cors', cors_config) - kwargs.setdefault('authorizer', bp_authorizers.jwt_authorizer) - handler_type = 'route' - name = kwargs.pop('name', None) - registration_kwargs = {'path': path, 'kwargs': kwargs, 'authorizer': kwargs.get("authorizer")} - - def _register_handler(user_handler): - handler_name = name - if handler_name is None: - handler_name = user_handler.__name__ - if registration_kwargs is not None: - kwargs = registration_kwargs - else: - kwargs = {} - - if kwargs['authorizer'] == bp_authorizers.jwt_authorizer \ - or kwargs['authorizer'] == bp_authorizers.api_key_authorizer: - def _user_handler(context=None, **args): - if context is not None: - args['context'] = context - else: - authorizer_context = app.current_request.context['authorizer'] - if 
kwargs['authorizer'] == bp_authorizers.jwt_authorizer: - args['context'] = authorizers.jwt_context(authorizer_context) - else: - args['context'] = authorizer_context - return user_handler(**args) - - wrapped = self._wrap_handler(handler_type, handler_name, _user_handler) - self._register_handler(handler_type, handler_name, _user_handler, wrapped, kwargs) - else: - wrapped = self._wrap_handler(handler_type, handler_name, user_handler) - self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs) - return wrapped - - return _register_handler - - app.route = app_route.__get__(app, Chalice) - - def app_schedule(self, expression, name=None, description=''): - handler_type = 'schedule' - registration_kwargs = {'expression': expression, - 'description': description} - - def _register_handler(user_handler): - handler_name = name - if handler_name is None: - handler_name = user_handler.__name__ - kwargs = registration_kwargs - cron_expression = kwargs["expression"].to_string()[len("cron("):-1] - if len(cron_expression.split(" ")) > 5: - cron_expression = " ".join(cron_expression.split(" ")[:-1]) - cron_expression = cron_expression.replace("?", "*") - cron_shell(user_handler, cron_expression) - - wrapped = self._wrap_handler(handler_type, handler_name, user_handler) - self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs) - return wrapped - - return _register_handler - - app.schedule = app_schedule.__get__(app, Chalice) - - def spawn(function, args): - th = threading.Thread(target=function, kwargs=args) - th.setDaemon(True) - th.start() - - def cron_shell(function, cron_expression): - def to_start(): - scheduler = sched.scheduler(time.time, time.sleep) - citer = croniter(cron_expression, base_time) - while True: - next_execution = citer.get_next(datetime) - print(f"{function.__name__} next execution: {next_execution}") - scheduler.enterabs(next_execution.timestamp(), 1, function, argument=(None,)) - scheduler.run() - 
print(f"{function.__name__} executed: {next_execution}") - - spawn(to_start, None) diff --git a/api/chalicelib/blueprints/app/v1_api.py b/api/chalicelib/blueprints/app/v1_api.py deleted file mode 100644 index 1d69bb8a3..000000000 --- a/api/chalicelib/blueprints/app/v1_api.py +++ /dev/null @@ -1,127 +0,0 @@ -from chalice import Blueprint, Response - -from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers -from chalicelib.core import sessions, events, jobs, projects -from chalicelib.utils.TimeUTC import TimeUTC - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/v1/{projectKey}/users/{userId}/sessions', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_user_sessions(projectKey, userId, context): - projectId = projects.get_internal_project_id(projectKey) - params = app.current_request.query_params - - if params is None: - params = {} - - return { - 'data': sessions.get_user_sessions( - project_id=projectId, - user_id=userId, - start_date=params.get('start_date'), - end_date=params.get('end_date') - ) - } - - -@app.route('/v1/{projectKey}/sessions/{sessionId}/events', methods=['GET'], - authorizer=bp_authorizers.api_key_authorizer) -def get_session_events(projectKey, sessionId, context): - projectId = projects.get_internal_project_id(projectKey) - return { - 'data': events.get_by_sessionId2_pg( - project_id=projectId, - session_id=sessionId - ) - } - - -@app.route('/v1/{projectKey}/users/{userId}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_user_details(projectKey, userId, context): - projectId = projects.get_internal_project_id(projectKey) - return { - 'data': sessions.get_session_user( - project_id=projectId, - user_id=userId - ) - } - pass - - -@app.route('/v1/{projectKey}/users/{userId}', methods=['DELETE'], authorizer=bp_authorizers.api_key_authorizer) -def schedule_to_delete_user_data(projectKey, userId, context): - projectId = 
projects.get_internal_project_id(projectKey) - data = app.current_request.json_body - - data["action"] = "delete_user_data" - data["reference_id"] = userId - data["description"] = f"Delete user sessions of userId = {userId}" - data["start_at"] = TimeUTC.to_human_readable(TimeUTC.midnight(1)) - record = jobs.create(project_id=projectId, data=data) - return { - 'data': record - } - - -@app.route('/v1/{projectKey}/jobs', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_jobs(projectKey, context): - projectId = projects.get_internal_project_id(projectKey) - return { - 'data': jobs.get_all(project_id=projectId) - } - pass - - -@app.route('/v1/{projectKey}/jobs/{jobId}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_job(projectKey, jobId, context): - return { - 'data': jobs.get(job_id=jobId) - } - pass - - -@app.route('/v1/{projectKey}/jobs/{jobId}', methods=['DELETE'], authorizer=bp_authorizers.api_key_authorizer) -def cancel_job(projectKey, jobId, context): - job = jobs.get(job_id=jobId) - job_not_found = len(job.keys()) == 0 - - if job_not_found or job["status"] == jobs.JobStatus.COMPLETED or job["status"] == jobs.JobStatus.CANCELLED: - return Response(status_code=501, body="The request job has already been canceled/completed (or was not found).") - - job["status"] = "cancelled" - return { - 'data': jobs.update(job_id=jobId, job=job) - } - -@app.route('/v1/projects', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_projects(context): - records = projects.get_projects(tenant_id=context['tenantId']) - for record in records: - del record['projectId'] - - return { - 'data': records - } - - -@app.route('/v1/projects/{projectKey}', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_project(projectKey, context): - return { - 'data': projects.get_project_by_key(tenant_id=context['tenantId'], project_key=projectKey) - } - - -@app.route('/v1/projects', methods=['POST'], 
authorizer=bp_authorizers.api_key_authorizer) -def create_project(context): - data = app.current_request.json_body - record = projects.create( - tenant_id=context['tenantId'], - user_id=None, - data=data, - skip_authorization=True - ) - del record['data']['projectId'] - return record diff --git a/api/chalicelib/blueprints/bp_authorizers.py b/api/chalicelib/blueprints/bp_authorizers.py deleted file mode 100644 index 888f2910d..000000000 --- a/api/chalicelib/blueprints/bp_authorizers.py +++ /dev/null @@ -1,37 +0,0 @@ -from chalice import Blueprint, AuthResponse -from chalicelib.core import authorizers - -from chalicelib.core import users - -app = Blueprint(__name__) - - -@app.authorizer() -def api_key_authorizer(auth_request): - r = authorizers.api_key_authorizer(auth_request.token) - if r is None: - return AuthResponse(routes=[], principal_id=None) - r["authorizer_identity"] = "api_key" - print(r) - return AuthResponse( - routes=['*'], - principal_id=r['tenantId'], - context=r - ) - - -@app.authorizer(ttl_seconds=60) -def jwt_authorizer(auth_request): - jwt_payload = authorizers.jwt_authorizer(auth_request.token) - if jwt_payload is None \ - or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ - or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): - return AuthResponse(routes=[], principal_id=None) - jwt_payload["authorizer_identity"] = "jwt" - print(jwt_payload) - return AuthResponse( - routes=['*'], - principal_id=jwt_payload['userId'], - context=jwt_payload - ) diff --git a/api/chalicelib/blueprints/bp_core.py b/api/chalicelib/blueprints/bp_core.py deleted file mode 100644 index 303bca306..000000000 --- a/api/chalicelib/blueprints/bp_core.py +++ /dev/null @@ -1,909 +0,0 @@ -from chalice import Blueprint -from chalice import Response - -from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers -from chalicelib.core import 
log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \ - sessions_metas, alerts, funnels, issues, integrations_manager, errors_favorite_viewed, metadata, \ - log_tool_elasticsearch, log_tool_datadog, \ - log_tool_stackdriver, reset_password, sessions_favorite_viewed, \ - log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \ - log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \ - assist, heatmaps, mobile -from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import email_helper -from chalicelib.utils.helper import environ - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/{projectId}/sessions2/favorite', methods=['GET']) -def get_favorite_sessions2(projectId, context): - params = app.current_request.query_params - - return { - 'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context["userId"], include_viewed=True) - } - - -@app.route('/{projectId}/sessions2/{sessionId}', methods=['GET']) -def get_session2(projectId, sessionId, context): - data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context["userId"], - include_fav_viewed=True, group_metadata=True) - if data is None: - return {"errors": ["session not found"]} - - sessions_favorite_viewed.view_session(project_id=projectId, user_id=context['userId'], session_id=sessionId) - return { - 'data': data - } - - -@app.route('/{projectId}/sessions2/{sessionId}/favorite', methods=['GET']) -def add_remove_favorite_session2(projectId, sessionId, context): - return { - "data": sessions_favorite_viewed.favorite_session(project_id=projectId, user_id=context['userId'], - session_id=sessionId)} - - -@app.route('/{projectId}/sessions2/{sessionId}/assign', methods=['GET']) -def assign_session(projectId, sessionId, context): - data = sessions_assignments.get_by_session(project_id=projectId, 
session_id=sessionId, - tenant_id=context['tenantId'], - user_id=context["userId"]) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', methods=['GET']) -def get_error_trace(projectId, sessionId, errorId, context): - data = errors.get_trace(project_id=projectId, error_id=errorId) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}', methods=['GET']) -def assign_session(projectId, sessionId, issueId, context): - data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, - tenant_id=context['tenantId'], user_id=context["userId"]) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', methods=['POST', 'PUT']) -def comment_assignment(projectId, sessionId, issueId, context): - data = app.current_request.json_body - data = sessions_assignments.comment(tenant_id=context['tenantId'], project_id=projectId, - session_id=sessionId, assignment_id=issueId, - user_id=context["userId"], message=data["message"]) - if "errors" in data.keys(): - return data - return { - 'data': data - } - - -@app.route('/{projectId}/events/search', methods=['GET']) -def events_search(projectId, context): - params = app.current_request.query_params - if params is None: - return {"data": []} - - q = params.get('q', '') - if len(q) == 0: - return {"data": []} - result = events.search_pg2(q, params.get('type', ''), project_id=projectId, source=params.get('source'), - key=params.get("key")) - return result - - -@app.route('/{projectId}/sessions/search2', methods=['POST']) -def sessions_search2(projectId, context): - data = app.current_request.json_body - - data = sessions.search2_pg(data, projectId, user_id=context["userId"]) - return {'data': data} - - -@app.route('/{projectId}/sessions/filters', 
methods=['GET']) -def session_filter_values(projectId, context): - return {'data': sessions_metas.get_key_values(projectId)} - - -@app.route('/{projectId}/sessions/filters/top', methods=['GET']) -def session_top_filter_values(projectId, context): - return {'data': sessions_metas.get_top_key_values(projectId)} - - -@app.route('/{projectId}/sessions/filters/search', methods=['GET']) -def get_session_filters_meta(projectId, context): - params = app.current_request.query_params - if params is None: - return {"data": []} - - meta_type = params.get('type', '') - if len(meta_type) == 0: - return {"data": []} - q = params.get('q', '') - if len(q) == 0: - return {"data": []} - return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q) - - -@app.route('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', - methods=['POST', 'PUT']) -def integration_notify(projectId, integration, integrationId, source, sourceId, context): - data = app.current_request.json_body - comment = None - if "comment" in data: - comment = data["comment"] - if integration == "slack": - args = {"tenant_id": context["tenantId"], - "user": context['email'], "comment": comment, "project_id": projectId, - "integration_id": integrationId} - if source == "sessions": - return Slack.share_session(session_id=sourceId, **args) - elif source == "errors": - return Slack.share_error(error_id=sourceId, **args) - return {"data": None} - - -@app.route('/integrations/sentry', methods=['GET']) -def get_all_sentry(context): - return {"data": log_tool_sentry.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/sentry', methods=['GET']) -def get_sentry(projectId, context): - return {"data": log_tool_sentry.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/sentry', methods=['POST', 'PUT']) -def add_edit_sentry(projectId, context): - data = app.current_request.json_body - - return {"data": 
log_tool_sentry.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/sentry', methods=['DELETE']) -def delete_sentry(projectId, context): - return {"data": log_tool_sentry.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/{projectId}/integrations/sentry/events/{eventId}', methods=['GET']) -def proxy_sentry(projectId, eventId, context): - return {"data": log_tool_sentry.proxy_get(tenant_id=context["tenantId"], project_id=projectId, event_id=eventId)} - - -@app.route('/integrations/datadog', methods=['GET']) -def get_all_datadog(context): - return {"data": log_tool_datadog.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/datadog', methods=['GET']) -def get_datadog(projectId, context): - return {"data": log_tool_datadog.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/datadog', methods=['POST', 'PUT']) -def add_edit_datadog(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_datadog.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/datadog', methods=['DELETE']) -def delete_datadog(projectId, context): - return {"data": log_tool_datadog.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/stackdriver', methods=['GET']) -def get_all_stackdriver(context): - return {"data": log_tool_stackdriver.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/stackdriver', methods=['GET']) -def get_stackdriver(projectId, context): - return {"data": log_tool_stackdriver.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/stackdriver', methods=['POST', 'PUT']) -def add_edit_stackdriver(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_stackdriver.add_edit(tenant_id=context["tenantId"], project_id=projectId, 
data=data)} - - -@app.route('/{projectId}/integrations/stackdriver', methods=['DELETE']) -def delete_stackdriver(projectId, context): - return {"data": log_tool_stackdriver.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/newrelic', methods=['GET']) -def get_all_newrelic(context): - return {"data": log_tool_newrelic.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/newrelic', methods=['GET']) -def get_newrelic(projectId, context): - return {"data": log_tool_newrelic.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/newrelic', methods=['POST', 'PUT']) -def add_edit_newrelic(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_newrelic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/newrelic', methods=['DELETE']) -def delete_newrelic(projectId, context): - return {"data": log_tool_newrelic.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/rollbar', methods=['GET']) -def get_all_rollbar(context): - return {"data": log_tool_rollbar.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/rollbar', methods=['GET']) -def get_rollbar(projectId, context): - return {"data": log_tool_rollbar.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/rollbar', methods=['POST', 'PUT']) -def add_edit_rollbar(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_rollbar.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/rollbar', methods=['DELETE']) -def delete_datadog(projectId, context): - return {"data": log_tool_rollbar.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/bugsnag/list_projects', methods=['POST']) -def list_projects_bugsnag(context): - data = 
app.current_request.json_body - return {"data": log_tool_bugsnag.list_projects(auth_token=data["authorizationToken"])} - - -@app.route('/integrations/bugsnag', methods=['GET']) -def get_all_bugsnag(context): - return {"data": log_tool_bugsnag.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/bugsnag', methods=['GET']) -def get_bugsnag(projectId, context): - return {"data": log_tool_bugsnag.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/bugsnag', methods=['POST', 'PUT']) -def add_edit_bugsnag(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_bugsnag.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/bugsnag', methods=['DELETE']) -def delete_bugsnag(projectId, context): - return {"data": log_tool_bugsnag.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/cloudwatch/list_groups', methods=['POST']) -def list_groups_cloudwatch(context): - data = app.current_request.json_body - return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data["awsAccessKeyId"], - aws_secret_access_key=data["awsSecretAccessKey"], - region=data["region"])} - - -@app.route('/integrations/cloudwatch', methods=['GET']) -def get_all_cloudwatch(context): - return {"data": log_tool_cloudwatch.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/cloudwatch', methods=['GET']) -def get_cloudwatch(projectId, context): - return {"data": log_tool_cloudwatch.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/cloudwatch', methods=['POST', 'PUT']) -def add_edit_cloudwatch(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_cloudwatch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/cloudwatch', methods=['DELETE']) -def 
delete_cloudwatch(projectId, context): - return {"data": log_tool_cloudwatch.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/elasticsearch', methods=['GET']) -def get_all_elasticsearch(context): - return {"data": log_tool_elasticsearch.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/elasticsearch', methods=['GET']) -def get_elasticsearch(projectId, context): - return {"data": log_tool_elasticsearch.get(project_id=projectId)} - - -@app.route('/integrations/elasticsearch/test', methods=['POST']) -def test_elasticsearch_connection(context): - data = app.current_request.json_body - return {"data": log_tool_elasticsearch.ping(tenant_id=context["tenantId"], **data)} - - -@app.route('/{projectId}/integrations/elasticsearch', methods=['POST', 'PUT']) -def add_edit_elasticsearch(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_elasticsearch.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/elasticsearch', methods=['DELETE']) -def delete_elasticsearch(projectId, context): - return {"data": log_tool_elasticsearch.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/sumologic', methods=['GET']) -def get_all_sumologic(context): - return {"data": log_tool_sumologic.get_all(tenant_id=context["tenantId"])} - - -@app.route('/{projectId}/integrations/sumologic', methods=['GET']) -def get_sumologic(projectId, context): - return {"data": log_tool_sumologic.get(project_id=projectId)} - - -@app.route('/{projectId}/integrations/sumologic', methods=['POST', 'PUT']) -def add_edit_sumologic(projectId, context): - data = app.current_request.json_body - - return {"data": log_tool_sumologic.add_edit(tenant_id=context["tenantId"], project_id=projectId, data=data)} - - -@app.route('/{projectId}/integrations/sumologic', methods=['DELETE']) -def delete_sumologic(projectId, 
context): - return {"data": log_tool_sumologic.delete(tenant_id=context["tenantId"], project_id=projectId)} - - -@app.route('/integrations/issues', methods=['GET']) -def get_integration_status(context): - error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return {"data": {}} - return {"data": integration.get_obfuscated()} - - -@app.route('/integrations/jira', methods=['POST', 'PUT']) -def add_edit_jira_cloud(context): - data = app.current_request.json_body - error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, - tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.add_edit(data=data)} - - -@app.route('/integrations/github', methods=['POST', 'PUT']) -def add_edit_github(context): - data = app.current_request.json_body - error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, - tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.add_edit(data=data)} - - -@app.route('/integrations/issues', methods=['DELETE']) -def delete_default_issue_tracking_tool(context): - error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.delete()} - - -@app.route('/integrations/jira', methods=['DELETE']) -def delete_jira_cloud(context): - error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, - tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.delete()} - - -@app.route('/integrations/github', methods=['DELETE']) -def delete_github(context): - error, integration = 
integrations_manager.get_integration(tool=integration_github.PROVIDER, - tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.delete()} - - -@app.route('/integrations/issues/list_projects', methods=['GET']) -def get_all_issue_tracking_projects(context): - error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - data = integration.issue_handler.get_projects() - if "errors" in data: - return data - return {"data": data} - - -@app.route('/integrations/issues/{integrationProjectId}', methods=['GET']) -def get_integration_metadata(integrationProjectId, context): - error, integration = integrations_manager.get_integration(tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - data = integration.issue_handler.get_metas(integrationProjectId) - if "errors" in data.keys(): - return data - return {"data": data} - - -@app.route('/{projectId}/assignments', methods=['GET']) -def get_all_assignments(projectId, context): - data = sessions_assignments.get_all(project_id=projectId, user_id=context["userId"]) - return { - 'data': data - } - - -@app.route('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', methods=['POST', 'PUT']) -def create_issue_assignment(projectId, sessionId, integrationProjectId, context): - data = app.current_request.json_body - data = sessions_assignments.create_new_assignment(tenant_id=context['tenantId'], project_id=projectId, - session_id=sessionId, - creator_id=context["userId"], assignee=data["assignee"], - description=data["description"], title=data["title"], - issue_type=data["issueType"], - integration_project_id=integrationProjectId) - if "errors" in data.keys(): - return data - return { - 'data': data - } - - -@app.route('/{projectId}/gdpr', methods=['GET']) -def get_gdpr(projectId, context): - return 
{"data": projects.get_gdpr(project_id=projectId)} - - -@app.route('/{projectId}/gdpr', methods=['POST', 'PUT']) -def edit_gdpr(projectId, context): - data = app.current_request.json_body - - return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data)} - - -@app.route('/password/reset-link', methods=['PUT', 'POST'], authorizer=None) -def reset_password_handler(): - data = app.current_request.json_body - if "email" not in data or len(data["email"]) < 5: - return {"errors": ["please provide a valid email address"]} - return reset_password.reset(data) - - -@app.route('/{projectId}/metadata', methods=['GET']) -def get_metadata(projectId, context): - return {"data": metadata.get(project_id=projectId)} - - -@app.route('/{projectId}/metadata/list', methods=['POST', 'PUT']) -def add_edit_delete_metadata(projectId, context): - data = app.current_request.json_body - - return metadata.add_edit_delete(tenant_id=context["tenantId"], project_id=projectId, new_metas=data["list"]) - - -@app.route('/{projectId}/metadata', methods=['POST', 'PUT']) -def add_metadata(projectId, context): - data = app.current_request.json_body - - return metadata.add(tenant_id=context["tenantId"], project_id=projectId, new_name=data["key"]) - - -@app.route('/{projectId}/metadata/{index}', methods=['POST', 'PUT']) -def edit_metadata(projectId, index, context): - data = app.current_request.json_body - - return metadata.edit(tenant_id=context["tenantId"], project_id=projectId, index=int(index), - new_name=data["key"]) - - -@app.route('/{projectId}/metadata/{index}', methods=['DELETE']) -def delete_metadata(projectId, index, context): - return metadata.delete(tenant_id=context["tenantId"], project_id=projectId, index=index) - - -@app.route('/{projectId}/metadata/search', methods=['GET']) -def search_metadata(projectId, context): - params = app.current_request.query_params - q = params.get('q', '') - key = params.get('key', '') - if len(q) == 0 and len(key) == 0: - return {"data": []} - if len(q) == 
0: - return {"errors": ["please provide a value for search"]} - if len(key) == 0: - return {"errors": ["please provide a key for search"]} - return metadata.search(tenant_id=context["tenantId"], project_id=projectId, value=q, key=key) - - -@app.route('/{projectId}/integration/sources', methods=['GET']) -def search_integrations(projectId, context): - return log_tools.search(project_id=projectId) - - -@app.route('/async/email_assignment', methods=['POST', 'PUT'], authorizer=None) -def async_send_signup_emails(): - data = app.current_request.json_body - if data.pop("auth") != environ["async_Token"]: - return {} - email_helper.send_assign_session(recipient=data["email"], link=data["link"], message=data["message"]) - - -@app.route('/async/funnel/weekly_report2', methods=['POST', 'PUT'], authorizer=None) -def async_weekly_report(): - print("=========================> Sending weekly report") - data = app.current_request.json_body - if data.pop("auth") != environ["async_Token"]: - return {} - email_helper.weekly_report2(recipients=data["email"], data=data.get("data", None)) - - -@app.route('/async/basic/{step}', methods=['POST', 'PUT'], authorizer=None) -def async_basic_emails(step): - data = app.current_request.json_body - if data.pop("auth") != environ["async_Token"]: - return {} - if step.lower() == "member_invitation": - email_helper.send_team_invitation(recipient=data["email"], invitation_link=data["invitationLink"], - client_id=data["clientId"], sender_name=data["senderName"]) - - -@app.route('/{projectId}/sample_rate', methods=['GET']) -def get_capture_status(projectId, context): - return {"data": projects.get_capture_status(project_id=projectId)} - - -@app.route('/{projectId}/sample_rate', methods=['POST', 'PUT']) -def update_capture_status(projectId, context): - data = app.current_request.json_body - - return {"data": projects.update_capture_status(project_id=projectId, changes=data)} - - -@app.route('/announcements', methods=['GET']) -def 
get_all_announcements(context): - return {"data": announcements.get_all(context["userId"])} - - -@app.route('/announcements/view', methods=['GET']) -def get_all_announcements(context): - return {"data": announcements.view(user_id=context["userId"])} - - -@app.route('/{projectId}/errors/{errorId}/{action}', methods=['GET']) -def add_remove_favorite_error(projectId, errorId, action, context): - if action == "favorite": - return errors_favorite_viewed.favorite_error(project_id=projectId, user_id=context['userId'], error_id=errorId) - elif action == "sessions": - params = app.current_request.query_params - if params is None: - params = {} - start_date = params.get("startDate") - end_date = params.get("endDate") - return { - "data": errors.get_sessions(project_id=projectId, user_id=context['userId'], error_id=errorId, - start_date=start_date, end_date=end_date)} - elif action in list(errors.ACTION_STATE.keys()): - return errors.change_state(project_id=projectId, user_id=context['userId'], error_id=errorId, action=action) - else: - return {"errors": ["undefined action"]} - - -@app.route('/{projectId}/errors/merge', methods=['POST']) -def errors_merge(projectId, context): - data = app.current_request.json_body - - data = errors.merge(error_ids=data.get("errors", [])) - return data - - -@app.route('/show_banner', methods=['GET']) -def errors_merge(context): - return {"data": False} - - -@app.route('/{projectId}/alerts', methods=['POST', 'PUT']) -def create_alert(projectId, context): - data = app.current_request.json_body - return alerts.create(projectId, data) - - -@app.route('/{projectId}/alerts', methods=['GET']) -def get_all_alerts(projectId, context): - return {"data": alerts.get_all(projectId)} - - -@app.route('/{projectId}/alerts/{alertId}', methods=['GET']) -def get_alert(projectId, alertId, context): - return {"data": alerts.get(alertId)} - - -@app.route('/{projectId}/alerts/{alertId}', methods=['POST', 'PUT']) -def update_alert(projectId, alertId, context): - data 
= app.current_request.json_body - return alerts.update(alertId, data) - - -@app.route('/{projectId}/alerts/{alertId}', methods=['DELETE']) -def delete_alert(projectId, alertId, context): - return alerts.delete(projectId, alertId) - - -@app.route('/{projectId}/funnels', methods=['POST', 'PUT']) -def add_funnel(projectId, context): - data = app.current_request.json_body - return funnels.create(project_id=projectId, - user_id=context['userId'], - name=data["name"], - filter=data["filter"], - is_public=data.get("isPublic", False)) - - -@app.route('/{projectId}/funnels', methods=['GET']) -def get_funnels(projectId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return {"data": funnels.get_by_user(project_id=projectId, - user_id=context['userId'], - range_value=None, - start_date=None, - end_date=None, - details=False)} - - -@app.route('/{projectId}/funnels/details', methods=['GET']) -def get_funnels_with_details(projectId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return {"data": funnels.get_by_user(project_id=projectId, - user_id=context['userId'], - range_value=params.get("rangeValue", None), - start_date=params.get('startDate', None), - end_date=params.get('endDate', None), - details=True)} - - -@app.route('/{projectId}/funnels/issue_types', methods=['GET']) -def get_possible_issue_types(projectId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return {"data": funnels.get_possible_issue_types(project_id=projectId)} - - -@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['GET']) -def get_funnel_insights(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return funnels.get_top_insights(funnel_id=funnelId, project_id=projectId, - range_value=params.get("range_value", None), - start_date=params.get('startDate', None), - end_date=params.get('endDate', 
None)) - - -@app.route('/{projectId}/funnels/{funnelId}/insights', methods=['POST', 'PUT']) -def get_funnel_insights_on_the_fly(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - data = app.current_request.json_body - if data is None: - data = {} - - return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data}) - - -@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['GET']) -def get_funnel_issues(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return funnels.get_issues(funnel_id=funnelId, project_id=projectId, - range_value=params.get("range_value", None), - start_date=params.get('startDate', None), end_date=params.get('endDate', None)) - - -@app.route('/{projectId}/funnels/{funnelId}/issues', methods=['POST', 'PUT']) -def get_funnel_issues_on_the_fly(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - data = app.current_request.json_body - if data is None: - data = {} - - return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, project_id=projectId, data={**params, **data})} - - -@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['GET']) -def get_funnel_sessions(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context['userId'], project_id=projectId, - range_value=params.get("range_value", None), - start_date=params.get('startDate', None), - end_date=params.get('endDate', None))} - - -@app.route('/{projectId}/funnels/{funnelId}/sessions', methods=['POST', 'PUT']) -def get_funnel_sessions_on_the_fly(projectId, funnelId, context): - params = app.current_request.query_params - if params is None: - params = {} - data = app.current_request.json_body - if data is None: - data = 
{} - return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context['userId'], project_id=projectId, - data={**params, **data})} - - -@app.route('/{projectId}/funnels/issues/{issueId}/sessions', methods=['GET']) -def get_issue_sessions(projectId, issueId, context): - params = app.current_request.query_params - if params is None: - params = {} - - issue = issues.get(project_id=projectId, issue_id=issueId) - return { - "data": {"sessions": sessions.search_by_issue(user_id=context["userId"], project_id=projectId, issue=issue, - start_date=params.get('startDate', None), - end_date=params.get('endDate', None)), - "issue": issue}} - - -@app.route('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', methods=['POST', 'PUT']) -def get_funnel_issue_sessions(projectId, funnelId, issueId, context): - data = app.current_request.json_body - - data = funnels.search_by_issue(project_id=projectId, user_id=context["userId"], issue_id=issueId, - funnel_id=funnelId, data=data) - if "errors" in data: - return data - if data.get("issue") is None: - data["issue"] = issues.get(project_id=projectId, issue_id=issueId) - return { - "data": data - } - - -@app.route('/{projectId}/funnels/{funnelId}', methods=['GET']) -def get_funnel(projectId, funnelId, context): - data = funnels.get(funnel_id=funnelId, - project_id=projectId) - if data is None: - return {"errors": ["funnel not found"]} - return {"data": data} - - -@app.route('/{projectId}/funnels/{funnelId}', methods=['POST', 'PUT']) -def edit_funnel(projectId, funnelId, context): - data = app.current_request.json_body - return funnels.update(funnel_id=funnelId, - user_id=context['userId'], - name=data.get("name"), - filter=data.get("filter"), - is_public=data.get("isPublic")) - - -@app.route('/{projectId}/funnels/{funnelId}', methods=['DELETE']) -def delete_filter(projectId, funnelId, context): - return funnels.delete(user_id=context['userId'], funnel_id=funnelId, project_id=projectId) - - 
-@app.route('/{projectId}/sourcemaps', methods=['PUT'], authorizer=bp_authorizers.api_key_authorizer) -def sign_sourcemap_for_upload(projectId, context): - data = app.current_request.json_body - project_id = projects.get_internal_project_id(projectId) - if project_id is None: - return Response(status_code=400, body='invalid projectId') - - return {"data": sourcemaps.presign_upload_urls(project_id=project_id, urls=data["URL"])} - - -@app.route('/config/weekly_report', methods=['GET']) -def get_weekly_report_config(context): - return {"data": weekly_report.get_config(user_id=context['userId'])} - - -@app.route('/config/weekly_report', methods=['POST', 'PUT']) -def get_weekly_report_config(context): - data = app.current_request.json_body - return {"data": weekly_report.edit_config(user_id=context['userId'], weekly_report=data.get("weeklyReport", True))} - - -@app.route('/{projectId}/issue_types', methods=['GET']) -def issue_types(projectId, context): - # return {"data": issues.get_types_by_project(project_id=projectId)} - return {"data": issues.get_all_types()} - - -@app.route('/issue_types', methods=['GET']) -def all_issue_types(context): - return {"data": issues.get_all_types()} - - -@app.route('/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) -@app.route('/{projectId}/flows', methods=['GET', 'PUT', 'POST', 'DELETE']) -def removed_endpoints(projectId=None, context=None): - return Response(body={"errors": ["Endpoint no longer available"]}, status_code=410) - - -@app.route('/{projectId}/assist/sessions', methods=['GET']) -def sessions_live(projectId, context): - data = assist.get_live_sessions(projectId) - return {'data': data} - - -@app.route('/{projectId}/assist/sessions', methods=['POST']) -def sessions_live_search(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - data = assist.get_live_sessions(projectId, filters=data.get("filters")) - return {'data': data} - - -@app.route('/{projectId}/heatmaps/url', methods=['POST']) 
-def get_heatmaps_by_url(projectId, context): - data = app.current_request.json_body - return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} - - -@app.route('/general_stats', methods=['GET'], authorizer=None) -def get_general_stats(): - return {"data": {"sessions:": sessions.count_all()}} - - -@app.route('/{projectId}/mobile/{sessionId}/urls', methods=['POST']) -def mobile_signe(projectId, sessionId, context): - data = app.current_request.json_body - return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data["keys"])} diff --git a/api/chalicelib/blueprints/bp_core_crons.py b/api/chalicelib/blueprints/bp_core_crons.py deleted file mode 100644 index 817ffbb16..000000000 --- a/api/chalicelib/blueprints/bp_core_crons.py +++ /dev/null @@ -1,18 +0,0 @@ -from chalice import Blueprint -from chalice import Cron -from chalicelib import _overrides -from chalicelib.core import reset_password, weekly_report, jobs - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.schedule(Cron('0', '*', '?', '*', '*', '*')) -def run_scheduled_jobs(event): - jobs.execute_jobs() - - -# Run every monday. 
-@app.schedule(Cron('5', '0', '?', '*', 'MON', '*')) -def weekly_report2(event): - weekly_report.cron() diff --git a/api/chalicelib/blueprints/bp_core_dynamic.py b/api/chalicelib/blueprints/bp_core_dynamic.py deleted file mode 100644 index b494ea953..000000000 --- a/api/chalicelib/blueprints/bp_core_dynamic.py +++ /dev/null @@ -1,460 +0,0 @@ -from chalice import Blueprint, Response - -from chalicelib import _overrides -from chalicelib.core import assist -from chalicelib.core import boarding -from chalicelib.core import errors -from chalicelib.core import license -from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager -from chalicelib.core import notifications -from chalicelib.core import projects -from chalicelib.core import signup -from chalicelib.core import tenants -from chalicelib.core import users -from chalicelib.core import webhook -from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha -from chalicelib.utils import helper -from chalicelib.utils.helper import environ - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/login', methods=['POST'], authorizer=None) -def login(): - data = app.current_request.json_body - if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): - return {"errors": ["Invalid captcha."]} - r = users.authenticate(data['email'], data['password'], - for_plugin=False - ) - if r is None: - return Response(status_code=401, body={ - 'errors': ['You’ve entered invalid Email or Password.'] - }) - - tenant_id = r.pop("tenantId") - - r["limits"] = { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} - - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, - stack_integrations=True, version=True) - c["smtp"] = helper.has_smtp() - c["iceServers"] = 
assist.get_ice_servers() - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - -@app.route('/account', methods=['GET']) -def get_account(context): - r = users.get(tenant_id=context['tenantId'], user_id=context['userId']) - return { - 'data': { - **r, - "limits": { - "teamMember": -1, - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context['tenantId']) - }, - **license.get_status(context["tenantId"]), - "smtp": helper.has_smtp(), - "iceServers": assist.get_ice_servers() - } - } - - -@app.route('/projects', methods=['GET']) -def get_projects(context): - return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True, - stack_integrations=True, version=True)} - - -@app.route('/projects', methods=['POST', 'PUT']) -def create_project(context): - data = app.current_request.json_body - return projects.create(tenant_id=context["tenantId"], user_id=context["userId"], data=data) - - -@app.route('/projects/{projectId}', methods=['POST', 'PUT']) -def create_edit_project(projectId, context): - data = app.current_request.json_body - - return projects.edit(tenant_id=context["tenantId"], user_id=context["userId"], data=data, project_id=projectId) - - -@app.route('/projects/{projectId}', methods=['GET']) -def get_project(projectId, context): - data = projects.get_project(tenant_id=context["tenantId"], project_id=projectId, include_last_session=True, - include_gdpr=True) - if data is None: - return {"errors": ["project not found"]} - return {"data": data} - - -@app.route('/projects/{projectId}', methods=['DELETE']) -def delete_project(projectId, context): - return projects.delete(tenant_id=context["tenantId"], user_id=context["userId"], project_id=projectId) - - -@app.route('/projects/limit', methods=['GET']) -def get_projects_limit(context): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context["tenantId"]), - "remaining": -1 - }} - - 
-@app.route('/client', methods=['GET']) -def get_client(context): - r = tenants.get_by_tenant_id(context['tenantId']) - if r is not None: - r.pop("createdAt") - r["projects"] = projects.get_projects(tenant_id=context['tenantId'], recording_state=True, recorded=True, - stack_integrations=True, version=True) - return { - 'data': r - } - - -@app.route('/client/new_api_key', methods=['GET']) -def generate_new_tenant_token(context): - return { - 'data': tenants.generate_new_api_key(context['tenantId']) - } - - -@app.route('/client', methods=['PUT', 'POST']) -def put_client(context): - data = app.current_request.json_body - return tenants.update(tenant_id=context["tenantId"], user_id=context["userId"], data=data) - - -@app.route('/signup', methods=['GET'], authorizer=None) -def get_all_signup(): - return {"data": {"tenants": tenants.tenants_exists(), - "sso": None, - "ssoProvider": None, - "edition": helper.get_edition()}} - - -@app.route('/signup', methods=['POST', 'PUT'], authorizer=None) -def signup_handler(): - data = app.current_request.json_body - return signup.create_step1(data) - - -@app.route('/integrations/slack', methods=['POST', 'PUT']) -def add_slack_client(context): - data = app.current_request.json_body - if "url" not in data or "name" not in data: - return {"errors": ["please provide a url and a name"]} - n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], name=data["name"]) - if n is None: - return { - "errors": ["We couldn't send you a test message on your Slack channel. 
Please verify your webhook url."] - } - return {"data": n} - - -@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT']) -def edit_slack_integration(integrationId, context): - data = app.current_request.json_body - if data.get("url") and len(data["url"]) > 0: - old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId) - if old["endpoint"] != data["url"]: - if not Slack.say_hello(data["url"]): - return { - "errors": [ - "We couldn't send you a test message on your Slack channel. Please verify your webhook url."] - } - return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId, - changes={"name": data.get("name", ""), "endpoint": data["url"]})} - - -@app.route('/{projectId}/errors/search', methods=['POST']) -def errors_search(projectId, context): - data = app.current_request.json_body - params = app.current_request.query_params - if params is None: - params = {} - - return errors.search(data, projectId, user_id=context["userId"], status=params.get("status", "ALL"), - favorite_only="favorite" in params) - - -@app.route('/{projectId}/errors/stats', methods=['GET']) -def errors_stats(projectId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return errors.stats(projectId, user_id=context["userId"], **params) - - -@app.route('/{projectId}/errors/{errorId}', methods=['GET']) -def errors_get_details(projectId, errorId, context): - params = app.current_request.query_params - if params is None: - params = {} - - data = errors.get_details(project_id=projectId, user_id=context["userId"], error_id=errorId, **params) - if data.get("data") is not None: - errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context['userId'], error_id=errorId) - return data - - -@app.route('/{projectId}/errors/{errorId}/stats', methods=['GET']) -def errors_get_details_right_column(projectId, errorId, context): - params = app.current_request.query_params - if params is None: - 
params = {} - - data = errors.get_details_chart(project_id=projectId, user_id=context["userId"], error_id=errorId, **params) - return data - - -@app.route('/{projectId}/errors/{errorId}/sourcemaps', methods=['GET']) -def errors_get_details_sourcemaps(projectId, errorId, context): - data = errors.get_trace(project_id=projectId, error_id=errorId) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/async/alerts/notifications/{step}', methods=['POST', 'PUT'], authorizer=None) -def send_alerts_notification_async(step): - data = app.current_request.json_body - if data.pop("auth") != environ["async_Token"]: - return {"errors": ["missing auth"]} - if step == "slack": - slack.send_batch(notifications_list=data.get("notifications")) - elif step == "email": - alerts.send_by_email_batch(notifications_list=data.get("notifications")) - elif step == "webhook": - webhook.trigger_batch(data_list=data.get("notifications")) - - -@app.route('/notifications', methods=['GET']) -def get_notifications(context): - return {"data": notifications.get_all(tenant_id=context['tenantId'], user_id=context['userId'])} - - -@app.route('/notifications/{notificationId}/view', methods=['GET']) -def view_notifications(notificationId, context): - return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context['userId'])} - - -@app.route('/notifications/view', methods=['POST', 'PUT']) -def batch_view_notifications(context): - data = app.current_request.json_body - return {"data": notifications.view_notification(notification_ids=data.get("ids", []), - startTimestamp=data.get("startTimestamp"), - endTimestamp=data.get("endTimestamp"), - user_id=context['userId'], - tenant_id=context["tenantId"])} - - -@app.route('/notifications', methods=['POST', 'PUT'], authorizer=None) -def create_notifications(): - data = app.current_request.json_body - if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP": - return {"errors": ["missing 
token"]} - return notifications.create(data.get("notifications", [])) - - -@app.route('/boarding', methods=['GET']) -def get_boarding_state(context): - return {"data": boarding.get_state(tenant_id=context["tenantId"])} - - -@app.route('/boarding/installing', methods=['GET']) -def get_boarding_state_installing(context): - return {"data": boarding.get_state_installing(tenant_id=context["tenantId"])} - - -@app.route('/boarding/identify-users', methods=['GET']) -def get_boarding_state_identify_users(context): - return {"data": boarding.get_state_identify_users(tenant_id=context["tenantId"])} - - -@app.route('/boarding/manage-users', methods=['GET']) -def get_boarding_state_manage_users(context): - return {"data": boarding.get_state_manage_users(tenant_id=context["tenantId"])} - - -@app.route('/boarding/integrations', methods=['GET']) -def get_boarding_state_integrations(context): - return {"data": boarding.get_state_integrations(tenant_id=context["tenantId"])} - - -# this endpoint supports both jira & github based on `provider` attribute -@app.route('/integrations/issues', methods=['POST', 'PUT']) -def add_edit_jira_cloud_github(context): - data = app.current_request.json_body - provider = data.get("provider", "").upper() - error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.add_edit(data=data)} - - -@app.route('/integrations/slack/{integrationId}', methods=['GET']) -def get_slack_webhook(integrationId, context): - return {"data": webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)} - - -@app.route('/integrations/slack/channels', methods=['GET']) -def get_slack_integration(context): - return {"data": webhook.get_by_type(tenant_id=context["tenantId"], webhook_type='slack')} - - -@app.route('/integrations/slack/{integrationId}', methods=['DELETE']) -def delete_slack_integration(integrationId, context): - 
return webhook.delete(context["tenantId"], integrationId) - - -@app.route('/webhooks', methods=['POST', 'PUT']) -def add_edit_webhook(context): - data = app.current_request.json_body - return {"data": webhook.add_edit(tenant_id=context["tenantId"], data=data, replace_none=True)} - - -@app.route('/webhooks', methods=['GET']) -def get_webhooks(context): - return {"data": webhook.get_by_tenant(tenant_id=context["tenantId"], replace_none=True)} - - -@app.route('/webhooks/{webhookId}', methods=['DELETE']) -def delete_webhook(webhookId, context): - return {"data": webhook.delete(tenant_id=context["tenantId"], webhook_id=webhookId)} - - -@app.route('/client/members', methods=['GET']) -def get_members(context): - return {"data": users.get_members(tenant_id=context['tenantId'])} - - -@app.route('/client/members', methods=['PUT', 'POST']) -def add_member(context): - data = app.current_request.json_body - return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data) - - -@app.route('/users/invitation', methods=['GET'], authorizer=None) -def process_invitation_link(): - params = app.current_request.query_params - if params is None or len(params.get("token", "")) < 64: - return {"errors": ["please provide a valid invitation"]} - user = users.get_by_invitation_token(params["token"]) - if user is None: - return {"errors": ["invitation not found"]} - if user["expiredInvitation"]: - return {"errors": ["expired invitation, please ask your admin to send a new one"]} - if user["expiredChange"] is not None and not user["expiredChange"] \ - and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: - pass_token = user["changePwdToken"] - else: - pass_token = users.allow_password_change(user_id=user["userId"]) - return Response( - status_code=307, - body='', - headers={'Location': environ["SITE_URL"] + environ["change_password_link"] % (params["token"], pass_token), - 'Content-Type': 'text/plain'}) - - -@app.route('/password/reset', 
methods=['POST', 'PUT'], authorizer=None) -def change_password_by_invitation(): - data = app.current_request.json_body - if data is None or len(data.get("invitation", "")) < 64 or len(data.get("pass", "")) < 8: - return {"errors": ["please provide a valid invitation & pass"]} - user = users.get_by_invitation_token(token=data["invitation"], pass_token=data["pass"]) - if user is None: - return {"errors": ["invitation not found"]} - if user["expiredChange"]: - return {"errors": ["expired change, please re-use the invitation link"]} - - return users.set_password_invitation(new_password=data["password"], user_id=user["userId"]) - - -@app.route('/client/members/{memberId}', methods=['PUT', 'POST']) -def edit_member(memberId, context): - data = app.current_request.json_body - return users.edit(tenant_id=context['tenantId'], editor_id=context['userId'], changes=data, - user_id_to_update=memberId) - - -@app.route('/client/members/{memberId}/reset', methods=['GET']) -def reset_reinvite_member(memberId, context): - return users.reset_member(tenant_id=context['tenantId'], editor_id=context['userId'], user_id_to_update=memberId) - - -@app.route('/client/members/{memberId}', methods=['DELETE']) -def delete_member(memberId, context): - return users.delete_member(tenant_id=context["tenantId"], user_id=context['userId'], id_to_delete=memberId) - - -@app.route('/account/new_api_key', methods=['GET']) -def generate_new_user_token(context): - return {"data": users.generate_new_api_key(user_id=context['userId'])} - - -@app.route('/account', methods=['POST', 'PUT']) -def edit_account(context): - data = app.current_request.json_body - return users.edit(tenant_id=context['tenantId'], user_id_to_update=context['userId'], changes=data, - editor_id=context['userId']) - - -@app.route('/account/password', methods=['PUT', 'POST']) -def change_client_password(context): - data = app.current_request.json_body - return users.change_password(email=context['email'], old_password=data["oldPassword"], 
- new_password=data["newPassword"], tenant_id=context["tenantId"], - user_id=context["userId"]) - - -@app.route('/metadata/session_search', methods=['GET']) -def search_sessions_by_metadata(context): - params = app.current_request.query_params - if params is None: - return {"errors": ["please provide a key&value for search"]} - value = params.get('value', '') - key = params.get('key', '') - project_id = params.get('projectId') - if len(value) == 0 and len(key) == 0: - return {"errors": ["please provide a key&value for search"]} - if len(value) == 0: - return {"errors": ["please provide a value for search"]} - if len(key) == 0: - return {"errors": ["please provide a key for search"]} - return { - "data": sessions.search_by_metadata(tenant_id=context["tenantId"], user_id=context["userId"], m_value=value, - m_key=key, - project_id=project_id)} - - -@app.route('/plans', methods=['GET']) -def get_current_plan(context): - return { - "data": license.get_status(context["tenantId"]) - } - - -@app.route('/alerts/notifications', methods=['POST', 'PUT'], authorizer=None) -def send_alerts_notifications(): - data = app.current_request.json_body - return {"data": alerts.process_notifications(data.get("notifications", []))} diff --git a/api/chalicelib/blueprints/bp_core_dynamic_crons.py b/api/chalicelib/blueprints/bp_core_dynamic_crons.py deleted file mode 100644 index 74d8766e5..000000000 --- a/api/chalicelib/blueprints/bp_core_dynamic_crons.py +++ /dev/null @@ -1,13 +0,0 @@ -from chalice import Blueprint, Cron -from chalicelib import _overrides - -app = Blueprint(__name__) -_overrides.chalice_app(app) - -from chalicelib.core import telemetry - - -# Run every day. 
-@app.schedule(Cron('0', '0', '?', '*', '*', '*')) -def telemetry_cron(event): - telemetry.compute() diff --git a/api/chalicelib/blueprints/subs/bp_dashboard.py b/api/chalicelib/blueprints/subs/bp_dashboard.py deleted file mode 100644 index 00b3c0ed4..000000000 --- a/api/chalicelib/blueprints/subs/bp_dashboard.py +++ /dev/null @@ -1,550 +0,0 @@ -from chalice import Blueprint -from chalicelib.utils import helper -from chalicelib import _overrides - -from chalicelib.core import dashboard -from chalicelib.core import metadata - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/{projectId}/dashboard/metadata', methods=['GET']) -def get_metadata_map(projectId, context): - metamap = [] - for m in metadata.get(project_id=projectId): - metamap.append({"name": m["key"], "key": f"metadata{m['index']}"}) - return {"data": metamap} - - -@app.route('/{projectId}/dashboard/sessions', methods=['GET', 'POST']) -def get_dashboard_processed_sessions(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/errors', methods=['GET', 'POST']) -def get_dashboard_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/errors_trend', methods=['GET', 'POST']) -def get_dashboard_errors_trend(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})} - - 
-@app.route('/{projectId}/dashboard/application_activity', methods=['GET', 'POST']) -def get_dashboard_application_activity(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_application_activity(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/page_metrics', methods=['GET', 'POST']) -def get_dashboard_page_metrics(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_page_metrics(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/user_activity', methods=['GET', 'POST']) -def get_dashboard_user_activity(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_user_activity(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/performance', methods=['GET', 'POST']) -def get_dashboard_performance(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_performance(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/slowest_images', methods=['GET', 'POST']) -def get_dashboard_slowest_images(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/missing_resources', methods=['GET', 
'POST']) -def get_performance_sessions(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/network', methods=['GET', 'POST']) -def get_network_widget(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_network(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) -def get_dashboard_autocomplete(projectId, widget, context): - params = app.current_request.query_params - if params is None or params.get('q') is None or len(params.get('q')) == 0: - return {"data": []} - params['q'] = '^' + params['q'] - - if widget in ['performance']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=True) - elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', - 'impacted_sessions_by_slow_pages', 'pages_response_time']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), pages_only=True) - elif widget in ['resources_loading_time']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=False) - elif widget in ['time_between_events', 'events']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=False, events_only=True) - elif widget in ['metadata']: - data = dashboard.search(params.get('q', ''), None, project_id=projectId, 
- platform=params.get('platform', None), metadata=True, key=params.get("key")) - else: - return {"errors": [f"unsupported widget: {widget}"]} - return {'data': data} - - -# 1 -@app.route('/{projectId}/dashboard/slowest_resources', methods=['GET', 'POST']) -def get_dashboard_slowest_resources(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_resources(project_id=projectId, **{**data, **args})} - - -# 2 -@app.route('/{projectId}/dashboard/resources_loading_time', methods=['GET', 'POST']) -def get_dashboard_resources(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})} - - -# 3 -@app.route('/{projectId}/dashboard/pages_dom_buildtime', methods=['GET', 'POST']) -def get_dashboard_pages_dom(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args})} - - -# 4 -@app.route('/{projectId}/dashboard/busiest_time_of_day', methods=['GET', 'POST']) -def get_dashboard_busiest_time_of_day(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **{**data, **args})} - - -# 5 -@app.route('/{projectId}/dashboard/sessions_location', methods=['GET', 'POST']) -def get_dashboard_sessions_location(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = 
app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_sessions_location(project_id=projectId, **{**data, **args})} - - -# 6 -@app.route('/{projectId}/dashboard/speed_location', methods=['GET', 'POST']) -def get_dashboard_speed_location(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_speed_index_location(project_id=projectId, **{**data, **args})} - - -# 7 -@app.route('/{projectId}/dashboard/pages_response_time', methods=['GET', 'POST']) -def get_dashboard_pages_response_time(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_response_time(project_id=projectId, **{**data, **args})} - - -# 8 -@app.route('/{projectId}/dashboard/pages_response_time_distribution', methods=['GET', 'POST']) -def get_dashboard_pages_response_time_distribution(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **{**data, **args})} - - -# 9 -@app.route('/{projectId}/dashboard/top_metrics', methods=['GET', 'POST']) -def get_dashboard_top_metrics(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_top_metrics(project_id=projectId, **{**data, **args})} - - -# 10 -@app.route('/{projectId}/dashboard/time_to_render', methods=['GET', 'POST']) -def get_dashboard_time_to_render(projectId, context): - data = app.current_request.json_body - if data is None: - 
data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_time_to_render(project_id=projectId, **{**data, **args})} - - -# 11 -@app.route('/{projectId}/dashboard/impacted_sessions_by_slow_pages', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_slow_pages(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **{**data, **args})} - - -# 12 -@app.route('/{projectId}/dashboard/memory_consumption', methods=['GET', 'POST']) -def get_dashboard_memory_consumption(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})} - - -# 12.1 -@app.route('/{projectId}/dashboard/fps', methods=['GET', 'POST']) -def get_dashboard_avg_fps(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_avg_fps(project_id=projectId, **{**data, **args})} - - -# 12.2 -@app.route('/{projectId}/dashboard/cpu', methods=['GET', 'POST']) -def get_dashboard_avg_cpu(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})} - - -# 13 -@app.route('/{projectId}/dashboard/crashes', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_slow_pages(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - 
params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_crashes(project_id=projectId, **{**data, **args})} - - -# 14 -@app.route('/{projectId}/dashboard/domains_errors', methods=['GET', 'POST']) -def get_dashboard_domains_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})} - - -# 14.1 -@app.route('/{projectId}/dashboard/domains_errors_4xx', methods=['GET', 'POST']) -def get_dashboard_domains_errors_4xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **{**data, **args})} - - -# 14.2 -@app.route('/{projectId}/dashboard/domains_errors_5xx', methods=['GET', 'POST']) -def get_dashboard_domains_errors_5xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **{**data, **args})} - - -# 15 -@app.route('/{projectId}/dashboard/slowest_domains', methods=['GET', 'POST']) -def get_dashboard_slowest_domains(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_domains(project_id=projectId, **{**data, **args})} - - -# 16 -@app.route('/{projectId}/dashboard/errors_per_domains', methods=['GET', 'POST']) -def get_dashboard_errors_per_domains(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = 
app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})} - - -# 17 -@app.route('/{projectId}/dashboard/sessions_per_browser', methods=['GET', 'POST']) -def get_dashboard_sessions_per_browser(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **{**data, **args})} - - -# 18 -@app.route('/{projectId}/dashboard/calls_errors', methods=['GET', 'POST']) -def get_dashboard_calls_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})} - - -# 18.1 -@app.route('/{projectId}/dashboard/calls_errors_4xx', methods=['GET', 'POST']) -def get_dashboard_calls_errors_4xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **{**data, **args})} - - -# 18.2 -@app.route('/{projectId}/dashboard/calls_errors_5xx', methods=['GET', 'POST']) -def get_dashboard_calls_errors_5xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **{**data, **args})} - - -# 19 -@app.route('/{projectId}/dashboard/errors_per_type', methods=['GET', 'POST']) -def get_dashboard_errors_per_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = 
app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})} - - -# 20 -@app.route('/{projectId}/dashboard/resources_by_party', methods=['GET', 'POST']) -def get_dashboard_resources_by_party(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})} - - -# 21 -@app.route('/{projectId}/dashboard/resource_type_vs_response_end', methods=['GET', 'POST']) -def get_dashboard_errors_per_resource_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})} - - -# 22 -@app.route('/{projectId}/dashboard/resources_vs_visually_complete', methods=['GET', 'POST']) -def get_dashboard_resources_vs_visually_complete(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})} - - -# 23 -@app.route('/{projectId}/dashboard/impacted_sessions_by_js_errors', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_js_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})} - - -# 24 -@app.route('/{projectId}/dashboard/resources_count_by_type', methods=['GET', 'POST']) -def 
get_dashboard_resources_count_by_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})} - - -# 25 -@app.route('/{projectId}/dashboard/time_between_events', methods=['GET']) -def get_dashboard_resources_count_by_type(projectId, context): - return {"errors": ["please choose 2 events"]} - - -@app.route('/{projectId}/dashboard/overview', methods=['GET', 'POST']) -def get_dashboard_group(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": [ - *helper.explode_widget(key="count_sessions", - data=dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **{**data, **args}), - "chart": dashboard.get_performance(project_id=projectId, **{**data, **args}) - .get("chart", [])}), - *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args}), - key="avg_pages_dom_buildtime"), - *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **{**data, **args}), - key="avg_pages_response_time"), - *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **{**data, **args}), - key="avg_time_to_render"), - *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})), - 
*helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})), - *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **{**data, **args})), - ]} diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index 6ed9aff3e..4c8bb151d 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -1,10 +1,11 @@ +import json import time -from chalicelib.utils.helper import environ -from chalicelib.core import notifications +from fastapi import BackgroundTasks + +from chalicelib.core import notifications, slack, webhook from chalicelib.utils import pg_client, helper, email_helper from chalicelib.utils.TimeUTC import TimeUTC -import json ALLOW_UPDATE = ["name", "description", "active", "detectionMethod", "query", "options"] @@ -111,7 +112,7 @@ def update(id, changes): return {"data": __process_circular(a)} -def process_notifications(data): +def process_notifications(data, background_tasks: BackgroundTasks): full = {} for n in data: if "message" in n["options"]: @@ -132,7 +133,15 @@ def process_notifications(data): BATCH_SIZE = 200 for t in full.keys(): for i in range(0, len(full[t]), BATCH_SIZE): - helper.async_post(environ['alert_ntf'] % t, {"notifications": full[t][i:i + BATCH_SIZE]}) + # helper.async_post(config('alert_ntf') % t, {"notifications": full[t][i:i + BATCH_SIZE]}) + notifications_list = full[t][i:i + BATCH_SIZE] + + if t == "slack": + background_tasks.add_task(slack.send_batch, notifications_list=notifications_list) + elif t == "email": + background_tasks.add_task(send_by_email_batch, notifications_list=notifications_list) + elif t == "webhook": + background_tasks.add_task(webhook.trigger_batch, data_list=notifications_list) def send_by_email(notification, destination): diff --git a/api/chalicelib/core/announcements.py b/api/chalicelib/core/announcements.py index b4d4f2a22..2ef244751 100644 --- a/api/chalicelib/core/announcements.py +++ b/api/chalicelib/core/announcements.py @@ -1,6 
+1,6 @@ from chalicelib.utils import pg_client from chalicelib.utils import helper -from chalicelib.utils.helper import environ +from decouple import config from chalicelib.utils.TimeUTC import TimeUTC @@ -22,7 +22,7 @@ def get_all(user_id): for a in announcements: a["createdAt"] = TimeUTC.datetime_to_timestamp(a["createdAt"]) if a["imageUrl"] is not None and len(a["imageUrl"]) > 0: - a["imageUrl"] = environ["announcement_url"] + a["imageUrl"] + a["imageUrl"] = config("announcement_url") + a["imageUrl"] return announcements diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py index 12e24cac9..8242e69ff 100644 --- a/api/chalicelib/core/assist.py +++ b/api/chalicelib/core/assist.py @@ -1,9 +1,10 @@ +from chalicelib.utils import pg_client, helper +from chalicelib.core import projects, sessions, sessions_metas import requests +from decouple import config + from chalicelib.core import projects, sessions, sessions_metas from chalicelib.utils import pg_client, helper -from chalicelib.core import projects, sessions, sessions_metas -from chalicelib.utils import pg_client, helper -from chalicelib.utils.helper import environ SESSION_PROJECTION_COLS = """s.project_id, s.session_id::text AS session_id, @@ -23,7 +24,7 @@ SESSION_PROJECTION_COLS = """s.project_id, def get_live_sessions(project_id, filters=None): project_key = projects.get_project_key(project_id) - connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}") + connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") if connected_peers.status_code != 200: print("!! 
issue with the peer-server") print(connected_peers.text) @@ -67,7 +68,7 @@ def get_live_sessions(project_id, filters=None): def is_live(project_id, session_id, project_key=None): if project_key is None: project_key = projects.get_project_key(project_id) - connected_peers = requests.get(environ["peers"] % environ["S3_KEY"] + f"/{project_key}") + connected_peers = requests.get(config("peers") % config("S3_KEY") + f"/{project_key}") if connected_peers.status_code != 200: print("!! issue with the peer-server") print(connected_peers.text) @@ -77,5 +78,5 @@ def is_live(project_id, session_id, project_key=None): def get_ice_servers(): - return environ.get("iceServers") if environ.get("iceServers") is not None \ - and len(environ["iceServers"]) > 0 else None + return config("iceServers") if config("iceServers", default=None) is not None \ + and len(config("iceServers")) > 0 else None diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index 1122b0d65..33a859cc8 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -1,8 +1,7 @@ -from chalicelib.utils.helper import environ import jwt from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC - +from decouple import config from chalicelib.core import tenants from chalicelib.core import users @@ -14,8 +13,8 @@ def jwt_authorizer(token): try: payload = jwt.decode( token[1], - environ["jwt_secret"], - algorithms=environ["jwt_algorithm"], + config("jwt_secret"), + algorithms=config("jwt_algorithm"), audience=[f"plugin:{helper.get_stage_name()}", f"front:{helper.get_stage_name()}"] ) except jwt.ExpiredSignatureError: @@ -43,15 +42,15 @@ def generate_jwt(id, tenant_id, iat, aud): payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000, - "iss": environ["jwt_issuer"], + "exp": iat // 1000 + config("jwt_exp_delta_seconds",cast=int) + 
TimeUTC.get_utc_offset() // 1000, + "iss": config("jwt_issuer"), "iat": iat // 1000, "aud": aud }, - key=environ["jwt_secret"], - algorithm=environ["jwt_algorithm"] + key=config("jwt_secret"), + algorithm=config("jwt_algorithm") ) - return token.decode("utf-8") + return token def api_key_authorizer(token): diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py index b3da03a37..bd0ae7f21 100644 --- a/api/chalicelib/core/collaboration_slack.py +++ b/api/chalicelib/core/collaboration_slack.py @@ -1,5 +1,5 @@ import requests -from chalicelib.utils.helper import environ +from decouple import config from datetime import datetime from chalicelib.core import webhook @@ -95,8 +95,8 @@ class Slack: def share_session(cls, tenant_id, project_id, session_id, user, comment, integration_id=None): args = {"fallback": f"{user} has shared the below session!", "pretext": f"{user} has shared the below session!", - "title": f"{environ['SITE_URL']}/{project_id}/session/{session_id}", - "title_link": f"{environ['SITE_URL']}/{project_id}/session/{session_id}", + "title": f"{config('SITE_URL')}/{project_id}/session/{session_id}", + "title_link": f"{config('SITE_URL')}/{project_id}/session/{session_id}", "text": comment} return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)} @@ -104,8 +104,8 @@ class Slack: def share_error(cls, tenant_id, project_id, error_id, user, comment, integration_id=None): args = {"fallback": f"{user} has shared the below error!", "pretext": f"{user} has shared the below error!", - "title": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}", - "title_link": f"{environ['SITE_URL']}/{project_id}/errors/{error_id}", + "title": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", + "title_link": f"{config('SITE_URL')}/{project_id}/errors/{error_id}", "text": comment} return {"data": cls.__share_to_slack(tenant_id, integration_id, **args)} diff --git a/api/chalicelib/core/errors.py 
b/api/chalicelib/core/errors.py index 642501246..2bd4a8d80 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -777,4 +777,4 @@ def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTim return { "data": helper.dict_to_camel_case(row) - } + } \ No newline at end of file diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 24326902a..7d0bcee7d 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -1,10 +1,10 @@ +import json + import chalicelib.utils.helper from chalicelib.core import events, significance, sessions -from chalicelib.utils.TimeUTC import TimeUTC - -from chalicelib.utils import helper, pg_client from chalicelib.utils import dev -import json +from chalicelib.utils import helper, pg_client +from chalicelib.utils.TimeUTC import TimeUTC REMOVE_KEYS = ["key", "_key", "startDate", "endDate"] diff --git a/api/chalicelib/core/insights.py b/api/chalicelib/core/insights.py new file mode 100644 index 000000000..79b32a4b1 --- /dev/null +++ b/api/chalicelib/core/insights.py @@ -0,0 +1,932 @@ +from chalicelib.core import sessions_metas +from chalicelib.utils import helper, dev +from chalicelib.utils import pg_client +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.metrics_helper import __get_step_size +import math +from chalicelib.core.dashboard import __get_constraints, __get_constraint_values + + +def __transform_journey(rows): + nodes = [] + links = [] + for r in rows: + source = r["source_event"][r["source_event"].index("_") + 1:] + target = r["target_event"][r["target_event"].index("_") + 1:] + if source not in nodes: + nodes.append(source) + if target not in nodes: + nodes.append(target) + links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) + return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} + + +JOURNEY_DEPTH = 5 +JOURNEY_TYPES = { + "PAGES": 
{"table": "events.pages", "column": "base_path", "table_id": "message_id"}, + "CLICK": {"table": "events.clicks", "column": "label", "table_id": "message_id"}, + # "VIEW": {"table": "events_ios.views", "column": "name", "table_id": "seq_index"}, TODO: enable this for SAAS only + "EVENT": {"table": "events_common.customs", "column": "name", "table_id": "seq_index"} +} + + +@dev.timed +def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): + pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_start = None + event_table = JOURNEY_TYPES["PAGES"]["table"] + event_column = JOURNEY_TYPES["PAGES"]["column"] + event_table_id = JOURNEY_TYPES["PAGES"]["table_id"] + extra_values = {} + for f in filters: + if f["type"] == "START_POINT": + event_start = f["value"] + elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT source_event, + target_event, + count(*) AS value + + FROM (SELECT event_number || '_' || value as target_event, + LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event + FROM (SELECT value, + session_rank, + message_id, + ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number + + {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark" + if event_start else ""} + + FROM (SELECT session_id, + message_id, + timestamp, + value, + 
SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank + FROM (SELECT *, + CASE + WHEN source_timestamp IS NULL THEN 1 + ELSE 0 END AS new_session + FROM (SELECT session_id, + {event_table_id} AS message_id, + timestamp, + {event_column} AS value, + LAG(timestamp) + OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp + FROM {event_table} INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_subset)} + ) AS related_events) AS ranked_events) AS processed + {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} + ) AS sorted_events + WHERE event_number <= %(JOURNEY_DEPTH)s) AS final + WHERE source_event IS NOT NULL + and target_event IS NOT NULL + GROUP BY source_event, target_event + ORDER BY value DESC + LIMIT 20;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, + **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return __transform_journey(rows) + + +def __compute_weekly_percentage(rows): + if rows is None or len(rows) == 0: + return rows + t = -1 + for r in rows: + if r["week"] == 0: + t = r["usersCount"] + r["percentage"] = r["usersCount"] / t + return rows + + +def __complete_retention(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if i < len(rows) \ + and i != rows[i]["week"]: + rows.insert(i, neutral) + elif i >= len(rows): + rows.append(neutral) + return rows + + +def __complete_acquisition(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + 
week = 0 + delta_date = 0 + while max_week > 0: + start_date += TimeUTC.MS_WEEK + if end_date is not None and start_date >= end_date: + break + delta = 0 + if delta_date + week >= len(rows) \ + or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + rows.insert(delta_date + week + i, neutral) + delta = i + else: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if delta_date + week + i < len(rows) \ + and i != rows[delta_date + week + i]["week"]: + rows.insert(delta_date + week + i, neutral) + elif delta_date + week + i >= len(rows): + rows.append(neutral) + delta = i + week += delta + max_week -= 1 + delta_date += 1 + return rows + + +@dev.timed +def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("DATE_TRUNC('week', to_timestamp(start_ts / 1000)) = to_timestamp(%(startTimestamp)s / 1000)") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - DATE_TRUNC('week', to_timestamp(%(startTimestamp)s / 1000)::timestamp)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM 
(SELECT DISTINCT user_id + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts < %(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + ) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <=sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id + ) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT 
connexions_list.user_id) AS connected_users + FROM (SELECT user_id, MIN(DATE_TRUNC('week', to_timestamp(start_ts / 1000))) AS first_connexion_week + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions + WHERE users_list.user_id = sessions.user_id + AND first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" 
+ extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT FLOOR(DATE_PART('day', connexion_week - to_timestamp(%(startTimestamp)s/1000)) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND DATE_PART('week', to_timestamp((sessions.start_ts - %(startTimestamp)s)/1000)) = 1 + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) + WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND 
bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE users_list.user_id = sessions.user_id + AND %(startTimestamp)s <= sessions.start_ts + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY week + ORDER BY week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for 
f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT EXTRACT(EPOCH FROM first_connexion_week::date)::bigint*1000 AS first_connexion_week, + FLOOR(DATE_PART('day', connexion_week - first_connexion_week) / 7)::integer AS week, + COUNT(DISTINCT connexions_list.user_id) AS users_count, + ARRAY_AGG(DISTINCT connexions_list.user_id) AS connected_users + FROM (SELECT user_id, DATE_TRUNC('week', to_timestamp(first_connexion_week / 1000)) AS first_connexion_week + FROM(SELECT DISTINCT user_id, MIN(start_ts) AS first_connexion_week + FROM sessions INNER JOIN {event_table} AS feature USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND NOT EXISTS((SELECT 1 + FROM sessions AS bsess INNER JOIN {event_table} AS bfeature USING (session_id) 
+ WHERE bsess.start_ts<%(startTimestamp)s + AND project_id = %(project_id)s + AND bsess.user_id = sessions.user_id + AND bfeature.timestamp<%(startTimestamp)s + AND bfeature.{event_column}=%(value)s + LIMIT 1)) + GROUP BY user_id) AS raw_users_list) AS users_list + LEFT JOIN LATERAL (SELECT DATE_TRUNC('week', to_timestamp(start_ts / 1000)::timestamp) AS connexion_week, + user_id + FROM sessions INNER JOIN {event_table} AS feature USING(session_id) + WHERE users_list.user_id = sessions.user_id + AND first_connexion_week <= + DATE_TRUNC('week', to_timestamp(sessions.start_ts / 1000)::timestamp) + AND sessions.project_id = %(project_id)s + AND sessions.start_ts < (%(endTimestamp)s - 1) + AND feature.timestamp >= %(startTimestamp)s + AND feature.timestamp < %(endTimestamp)s + AND feature.{event_column} = %(value)s + GROUP BY connexion_week, user_id) AS connexions_list ON (TRUE) + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_table = 
JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return [] + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL + GROUP BY value + ORDER BY count DESC + LIMIT 7;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + popularity = cur.fetchall() + pg_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value;""" + # TODO: solve full scan + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + 
frequencies = cur.fetchall() + total_usage = sum([f["count"] for f in frequencies]) + frequencies = {f["value"]: f["count"] for f in frequencies} + for p in popularity: + p["popularity"] = p.pop("count") / all_user_count + p["frequency"] = frequencies[p["value"]] / total_usage + + return popularity + + +@dev.timed +def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + all_user_count = cur.fetchone()["count"] + if all_user_count == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = 
f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND user_id IS NOT NULL;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + adoption = cur.fetchone()["count"] / all_user_count + return {"target": all_user_count, "adoption": adoption, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False 
+ elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature + INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1 + ORDER BY 2 DESC + LIMIT 10;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_daily_usage(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + with pg_client.PostgresClient() as cur: + pg_sub_query_chart.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query_chart.append("feature.timestamp < %(endTimestamp)s") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + AND length({event_column})>2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_sub_query_chart.append(f"feature.{event_column} = %(value)s") + pg_query = f"""SELECT 
generated_timestamp AS timestamp, + COALESCE(COUNT(session_id), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT session_id + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp;""" + params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return {"chart": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + pg_sub_query.append(f"length({event_column})>2") + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM 
{event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY value + ORDER BY avg DESC + LIMIT 7;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # TODO: solve full scan issue + print(cur.mogrify(pg_query, params)) + print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + + return rows + + +@dev.timed +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], + **args): + pg_sub_query_chart = __get_constraints(project_id=project_id, time_constraint=True, + chart=True, data=args) + + pg_sub_query_chart.append("user_id IS NOT NULL") + period = "DAY" + extra_values = {} + for f in filters: + if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: + period = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + pg_sub_query_chart.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(chart) AS chart + FROM (SELECT generated_timestamp AS timestamp, + COALESCE(COUNT(users), 0) AS count + FROM generate_series(%(startTimestamp)s, %(endTimestamp)s, %(step_size)s) AS generated_timestamp + LEFT JOIN LATERAL ( SELECT DISTINCT user_id + FROM public.sessions + WHERE {" AND ".join(pg_sub_query_chart)} + ) AS users ON (TRUE) + GROUP BY generated_timestamp + ORDER BY generated_timestamp) AS chart;""" + params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, + "project_id": project_id, + "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( + startTimestamp), + "endTimestamp": endTimestamp, **__get_constraint_values(args), + **extra_values} + # print(cur.mogrify(pg_query, params)) + # 
print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return row_users + + +@dev.timed +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args) + pg_sub_query.append("user_id IS NOT NULL") + + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT AVG(count) AS avg, JSONB_AGG(day_users_partition) AS partition + FROM (SELECT number_of_days, COUNT(user_id) AS count + FROM (SELECT user_id, COUNT(DISTINCT DATE_TRUNC('day', to_timestamp(start_ts / 1000))) AS number_of_days + FROM sessions + WHERE {" AND ".join(pg_sub_query)} + GROUP BY 1) AS users_connexions + GROUP BY number_of_days + ORDER BY number_of_days) AS day_users_partition;""" + params = {"project_id": project_id, + "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(cur.mogrify(pg_query, params)) + # print("---------------------") + cur.execute(cur.mogrify(pg_query, params)) + row_users = cur.fetchone() + + return helper.dict_to_camel_case(row_users) + + +@dev.timed +def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + pg_sub_query = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", + time_constraint=True) + pg_sub_query.append("user_id IS NOT NULL") + pg_sub_query.append("feature.timestamp >= %(startTimestamp)s") + pg_sub_query.append("feature.timestamp < %(endTimestamp)s") + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, 
sessions_metas.meta_type.USERID_IOS]: + pg_sub_query.append(f"sessions.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + pg_sub_query.append(f"feature.{event_column} = %(value)s") + + with pg_client.PostgresClient() as cur: + if default: + # get most used value + pg_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature INNER JOIN public.sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query[:-1])} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + cur.execute(cur.mogrify(pg_query, params)) + row = cur.fetchone() + if row is not None: + event_value = row["value"] + extra_values["value"] = event_value + if len(event_value) > 2: + pg_sub_query.append(f"length({event_column})>2") + pg_query = f"""SELECT user_id, last_time, interactions_count, MIN(start_ts) AS first_seen, MAX(start_ts) AS last_seen + FROM (SELECT user_id, MAX(timestamp) AS last_time, COUNT(DISTINCT session_id) AS interactions_count + FROM {event_table} AS feature INNER JOIN sessions USING (session_id) + WHERE {" AND ".join(pg_sub_query)} + GROUP BY user_id) AS user_last_usage + INNER JOIN sessions USING (user_id) + WHERE EXTRACT(EPOCH FROM now()) * 1000 - last_time > 7 * 24 * 60 * 60 * 1000 + GROUP BY user_id, last_time,interactions_count;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], 
+ "list": helper.list_to_camel_case(rows) + } + + +@dev.timed +def search(text, feature_type, project_id, platform=None): + if not feature_type: + resource_type = "ALL" + data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) + return data + + pg_sub_query = __get_constraints(project_id=project_id, time_constraint=True, duration=True, + data={} if platform is None else {"platform": platform}) + + params = {"startTimestamp": TimeUTC.now() - 2 * TimeUTC.MS_MONTH, + "endTimestamp": TimeUTC.now(), + "project_id": project_id, + "value": helper.string_to_sql_like(text.lower()), + "platform_0": platform} + if feature_type == "ALL": + with pg_client.PostgresClient() as cur: + sub_queries = [] + for e in JOURNEY_TYPES: + sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" + FROM {JOURNEY_TYPES[e]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[e]["column"]} ILIKE %(value)s + LIMIT 10)""") + pg_query = "UNION ALL".join(sub_queries) + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + elif JOURNEY_TYPES.get(feature_type) is not None: + with pg_client.PostgresClient() as cur: + pg_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + FROM {JOURNEY_TYPES[feature_type]["table"]} INNER JOIN public.sessions USING(session_id) + WHERE {" AND ".join(pg_sub_query)} AND {JOURNEY_TYPES[feature_type]["column"]} ILIKE %(value)s + LIMIT 10;""" + # print(cur.mogrify(pg_query, params)) + cur.execute(cur.mogrify(pg_query, params)) + rows = cur.fetchall() + else: + return [] + return [helper.dict_to_camel_case(row) for row in rows] \ No newline at end of file diff --git a/api/chalicelib/core/integration_github_issue.py b/api/chalicelib/core/integration_github_issue.py index a9e5e7317..0c2b78720 100644 --- 
a/api/chalicelib/core/integration_github_issue.py +++ b/api/chalicelib/core/integration_github_issue.py @@ -39,7 +39,7 @@ class GithubIntegrationIssue(BaseIntegrationIssue): for a in assignees: for u in metas["users"]: if a == str(u["id"]): - real_assignees.append(u["login"]) + real_assignees.append(u["name"]) break real_labels = ["OpenReplay"] for l in labels: diff --git a/api/chalicelib/core/jobs.py b/api/chalicelib/core/jobs.py index 7ad4ae4a6..4b7ba85ee 100644 --- a/api/chalicelib/core/jobs.py +++ b/api/chalicelib/core/jobs.py @@ -132,7 +132,7 @@ def get_scheduled_jobs(): def execute_jobs(): jobs = get_scheduled_jobs() if len(jobs) == 0: - print('No jobs to execute.') + # No jobs to execute return for job in jobs: diff --git a/api/chalicelib/core/metadata.py b/api/chalicelib/core/metadata.py index f3f00e4e1..b87a9253a 100644 --- a/api/chalicelib/core/metadata.py +++ b/api/chalicelib/core/metadata.py @@ -90,7 +90,9 @@ def delete(tenant_id, project_id, index: int): cur.execute(query=query) query = cur.mogrify(f"""UPDATE public.sessions SET {colname}= NULL - WHERE project_id = %(project_id)s""", + WHERE project_id = %(project_id)s + AND {colname} IS NOT NULL + """, {"project_id": project_id}) cur.execute(query=query) diff --git a/api/chalicelib/core/mobile.py b/api/chalicelib/core/mobile.py index 12a2d268d..3e60a0826 100644 --- a/api/chalicelib/core/mobile.py +++ b/api/chalicelib/core/mobile.py @@ -1,13 +1,13 @@ from chalicelib.core import projects from chalicelib.utils import s3 -from chalicelib.utils.helper import environ +from decouple import config def sign_keys(project_id, session_id, keys): result = [] project_key = projects.get_project_key(project_id) for k in keys: - result.append(s3.get_presigned_url_for_sharing(bucket=environ["iosBucket"], + result.append(s3.get_presigned_url_for_sharing(bucket=config("iosBucket"), key=f"{project_key}/{session_id}/{k}", expires_in=60 * 60)) return result diff --git a/api/chalicelib/core/projects.py 
b/api/chalicelib/core/projects.py index a9e1cdf92..1dad5eeb7 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -1,5 +1,6 @@ import json +import schemas from chalicelib.core import users from chalicelib.utils import pg_client, helper, dev from chalicelib.utils.TimeUTC import TimeUTC @@ -96,6 +97,7 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr= row = cur.fetchone() return helper.dict_to_camel_case(row) + def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ @@ -117,20 +119,20 @@ def get_project_by_key(tenant_id, project_key, include_last_session=False, inclu return helper.dict_to_camel_case(row) -def create(tenant_id, user_id, data, skip_authorization=False): +def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False): if not skip_authorization: admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} - return {"data": __create(tenant_id=tenant_id, name=data.get("name", "my first project"))} + return {"data": __create(tenant_id=tenant_id, name=data.name)} -def edit(tenant_id, user_id, project_id, data): +def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} return {"data": __update(tenant_id=tenant_id, project_id=project_id, - changes={"name": data.get("name", "my first project")})} + changes={"name": data.name})} def delete(tenant_id, user_id, project_id): diff --git a/api/chalicelib/core/reset_password.py b/api/chalicelib/core/reset_password.py index a8dbabf31..1c32cdddb 100644 --- a/api/chalicelib/core/reset_password.py +++ b/api/chalicelib/core/reset_password.py @@ -1,26 +1,27 @@ +import schemas from 
chalicelib.core import users from chalicelib.utils import email_helper, captcha, helper -def reset(data): +def reset(data: schemas.ForgetPasswordPayloadSchema): print("====================== reset password ===============") print(data) - if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): print("error: Invalid captcha.") return {"errors": ["Invalid captcha."]} if "email" not in data: return {"errors": ["email not found in body"]} if not helper.has_smtp(): return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} - a_users = users.get_by_email_only(data["email"]) + a_users = users.get_by_email_only(data.email) if len(a_users) > 1: - print(f"multiple users found for [{data['email']}] please contact our support") + print(f"multiple users found for [{data.email}] please contact our support") return {"errors": ["multiple users, please contact our support"]} elif len(a_users) == 1: a_users = a_users[0] invitation_link = users.generate_new_invitation(user_id=a_users["id"]) - email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link) + email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) else: - print(f"invalid email address [{data['email']}]") + print(f"invalid email address [{data.email}]") return {"errors": ["invalid email address"]} return {"data": {"state": "success"}} diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index aa1ab3d58..5edbbb116 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1,6 +1,6 @@ -from chalicelib.utils import pg_client, helper, dev -from chalicelib.core import events, sessions_metas, socket_ios, metadata, events_ios, \ +from chalicelib.core import events, sessions_metas, metadata, events_ios, \ sessions_mobs, issues, projects, errors, resources, assist +from 
chalicelib.utils import pg_client, helper, dev SESSION_PROJECTION_COLS = """s.project_id, s.session_id::text AS session_id, diff --git a/api/chalicelib/core/sessions_assignments.py b/api/chalicelib/core/sessions_assignments.py index 3e0929dad..4491f62d0 100644 --- a/api/chalicelib/core/sessions_assignments.py +++ b/api/chalicelib/core/sessions_assignments.py @@ -1,4 +1,4 @@ -from chalicelib.utils.helper import environ as env +from decouple import config from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils import pg_client @@ -32,7 +32,7 @@ def create_new_assignment(tenant_id, project_id, session_id, creator_id, assigne if i is None: return {"errors": [f"integration not found"]} - link = env["SITE_URL"] + f"/{project_id}/session/{session_id}" + link = config("SITE_URL") + f"/{project_id}/session/{session_id}" description += f"\n> {link}" try: issue = integration.issue_handler.create_new_assignment(title=title, assignee=assignee, description=description, diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 760813b5c..8f61d436b 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -1,14 +1,15 @@ -from chalicelib.utils.helper import environ -from chalicelib.utils.s3 import client +from decouple import config + from chalicelib.utils import s3 +from chalicelib.utils.s3 import client def get_web(sessionId): return client.generate_presigned_url( 'get_object', Params={ - 'Bucket': environ["sessions_bucket"], - 'Key': sessionId + 'Bucket': config("sessions_bucket"), + 'Key': str(sessionId) }, ExpiresIn=100000 ) @@ -18,8 +19,8 @@ def get_ios(sessionId): return client.generate_presigned_url( 'get_object', Params={ - 'Bucket': environ["ios_bucket"], - 'Key': sessionId + 'Bucket': config("ios_bucket"), + 'Key': str(sessionId) }, ExpiresIn=100000 ) @@ -27,4 +28,4 @@ def get_ios(sessionId): def delete_mobs(session_ids): for session_id in 
session_ids: - s3.schedule_for_deletion(environ["sessions_bucket"], session_id) + s3.schedule_for_deletion(config("sessions_bucket"), session_id) diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index 50fc6e41a..b4f02f0b8 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -1,21 +1,24 @@ -from chalicelib.utils import helper -from chalicelib.utils import pg_client +import json + +from decouple import config + +import schemas from chalicelib.core import users, telemetry, tenants from chalicelib.utils import captcha -import json +from chalicelib.utils import helper +from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ -def create_step1(data): +def create_step1(data: schemas.UserSignupSchema): print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC") errors = [] if tenants.tenants_exists(): return {"errors": ["tenants already registered"]} - email = data.get("email") + email = data.email print(f"=====================> {email}") - password = data.get("password") + password = data.password print("Verifying email validity") if email is None or len(email) < 5 or not helper.is_valid_email(email): @@ -28,25 +31,25 @@ def create_step1(data): errors.append("Email address previously deleted.") print("Verifying captcha") - if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): errors.append("Invalid captcha.") print("Verifying password validity") - if len(data["password"]) < 6: + if len(password) < 6: errors.append("Password is too short, it must be at least 6 characters long.") print("Verifying fullname validity") - fullname = data.get("fullname") + fullname = data.fullname if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname): errors.append("Invalid full name.") 
print("Verifying company's name validity") - company_name = data.get("organizationName") + company_name = data.organizationName if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name): errors.append("invalid organization's name") print("Verifying project's name validity") - project_name = data.get("projectName") + project_name = data.projectName if project_name is None or len(project_name) < 1: project_name = "my first project" @@ -61,7 +64,7 @@ def create_step1(data): "projectName": project_name, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": company_name, - "versionNumber": environ["version_number"] + "versionNumber": config("version_number") } query = f"""\ WITH t AS ( diff --git a/api/chalicelib/core/slack.py b/api/chalicelib/core/slack.py index 411be0567..0bd715f5e 100644 --- a/api/chalicelib/core/slack.py +++ b/api/chalicelib/core/slack.py @@ -1,5 +1,5 @@ from datetime import datetime -from chalicelib.utils.helper import environ +from decouple import config from chalicelib.core.collaboration_slack import Slack @@ -10,7 +10,7 @@ def send(notification, destination): return Slack.send_text(tenant_id=notification["tenantId"], webhook_id=destination, text=notification["description"] \ - + f"\n<{environ['SITE_URL']}{notification['buttonUrl']}|{notification['buttonText']}>", + + f"\n<{config('SITE_URL')}{notification['buttonUrl']}|{notification['buttonText']}>", title=notification["title"], title_link=notification["buttonUrl"], ) @@ -23,7 +23,7 @@ def send_batch(notifications_list): if n.get("destination") not in webhookId_map: webhookId_map[n.get("destination")] = {"tenantId": n["notification"]["tenantId"], "batch": []} webhookId_map[n.get("destination")]["batch"].append({"text": n["notification"]["description"] \ - + f"\n<{environ['SITE_URL']}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>", + + 
f"\n<{config('SITE_URL')}{n['notification']['buttonUrl']}|{n['notification']['buttonText']}>", "title": n["notification"]["title"], "title_link": n["notification"]["buttonUrl"], "ts": datetime.now().timestamp()}) diff --git a/api/chalicelib/core/socket_ios.py b/api/chalicelib/core/socket_ios.py index d925797fe..50e4d025c 100644 --- a/api/chalicelib/core/socket_ios.py +++ b/api/chalicelib/core/socket_ios.py @@ -1,10 +1,10 @@ import requests -from chalicelib.utils.helper import environ +from decouple import config from chalicelib.core import projects def start_replay(project_id, session_id, device, os_version, mob_url): - r = requests.post(environ["IOS_MIDDLEWARE"] + "/replay", json={ + r = requests.post(config("IOS_MIDDLEWARE") + "/replay", json={ "projectId": project_id, "projectKey": projects.get_project_key(project_id), "sessionId": session_id, @@ -18,5 +18,5 @@ def start_replay(project_id, session_id, device, os_version, mob_url): print(r.text) return r.text result = r.json() - result["url"] = environ["IOS_MIDDLEWARE"] + result["url"] = config("IOS_MIDDLEWARE") return result diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 01204847c..73341cb4d 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -1,4 +1,4 @@ -from chalicelib.utils.helper import environ +from decouple import config from chalicelib.utils import helper from chalicelib.utils import s3 @@ -17,7 +17,7 @@ def __get_key(project_id, url): def presign_share_urls(project_id, urls): results = [] for u in urls: - results.append(s3.get_presigned_url_for_sharing(bucket=environ['sourcemaps_bucket'], expires_in=120, + results.append(s3.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120, key=__get_key(project_id, u), check_exists=True)) return results @@ -26,7 +26,7 @@ def presign_share_urls(project_id, urls): def presign_upload_urls(project_id, urls): results = [] for u in urls: - 
results.append(s3.get_presigned_url_for_upload(bucket=environ['sourcemaps_bucket'], + results.append(s3.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'), expires_in=1800, key=__get_key(project_id, u))) return results @@ -87,7 +87,7 @@ def get_traces_group(project_id, payload): print(key) print("===============================") if key not in payloads: - file_exists = s3.exists(environ['sourcemaps_bucket'], key) + file_exists = s3.exists(config('sourcemaps_bucket'), key) all_exists = all_exists and file_exists if not file_exists: print(f"{u['absPath']} sourcemap (key '{key}') doesn't exist in S3") @@ -130,10 +130,10 @@ def fetch_missed_contexts(frames): if frames[i]["frame"]["absPath"] in source_cache: file = source_cache[frames[i]["frame"]["absPath"]] else: - file = s3.get_file(environ['js_cache_bucket'], get_js_cache_path(frames[i]["frame"]["absPath"])) + file = s3.get_file(config('js_cache_bucket'), get_js_cache_path(frames[i]["frame"]["absPath"])) if file is None: print( - f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {environ['js_cache_bucket']}") + f"File {get_js_cache_path(frames[i]['frame']['absPath'])} not found in {config('js_cache_bucket')}") source_cache[frames[i]["frame"]["absPath"]] = file if file is None: continue diff --git a/api/chalicelib/core/sourcemaps_parser.py b/api/chalicelib/core/sourcemaps_parser.py index d6e7414ba..83116aed7 100644 --- a/api/chalicelib/core/sourcemaps_parser.py +++ b/api/chalicelib/core/sourcemaps_parser.py @@ -1,6 +1,6 @@ import requests -from chalicelib.utils.helper import environ +from decouple import config def get_original_trace(key, positions): @@ -8,13 +8,13 @@ def get_original_trace(key, positions): "key": key, "positions": positions, "padding": 5, - "bucket": environ['sourcemaps_bucket'], - "S3_HOST": environ['S3_HOST'], - "S3_KEY": environ['S3_KEY'], - "S3_SECRET": environ['S3_SECRET'], - "region": environ['sessions_region'] + "bucket": config('sourcemaps_bucket'), + 
"S3_HOST": config('S3_HOST'), + "S3_KEY": config('S3_KEY'), + "S3_SECRET": config('S3_SECRET'), + "region": config('sessions_region') } - r = requests.post(environ["sourcemaps_reader"], json=payload) + r = requests.post(config("sourcemaps_reader"), json=payload) if r.status_code != 200: return {} diff --git a/api/chalicelib/core/tenants.py b/api/chalicelib/core/tenants.py index 054b3f5d5..db154525c 100644 --- a/api/chalicelib/core/tenants.py +++ b/api/chalicelib/core/tenants.py @@ -1,3 +1,4 @@ +import schemas from chalicelib.utils import pg_client from chalicelib.utils import helper from chalicelib.core import users @@ -62,18 +63,18 @@ def edit_client(tenant_id, changes): return helper.dict_to_camel_case(cur.fetchone()) -def update(tenant_id, user_id, data): +def update(tenant_id, user_id, data: schemas.UpdateTenantSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"error": "unauthorized"} - if "name" not in data and "optOut" not in data: + if data.name is None and data.opt_out is None: return {"errors": ["please provide 'name' of 'optOut' attribute for update"]} changes = {} - if "name" in data: - changes["name"] = data["name"] - if "optOut" in data: - changes["optOut"] = data["optOut"] + if data.name is not None and len(data.name) > 0: + changes["name"] = data.name + if data.opt_out is not None: + changes["optOut"] = data.opt_out return edit_client(tenant_id=tenant_id, changes=changes) diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 2af50ce57..1461c6e14 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -1,16 +1,15 @@ import json import secrets -from chalicelib.core import authorizers, metadata, projects, assist -from chalicelib.core import tenants -from chalicelib.utils import dev +from decouple import config +from fastapi import BackgroundTasks + +from chalicelib.core import authorizers, metadata, projects +from chalicelib.core 
import tenants, assist +from chalicelib.utils import dev, email_helper from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ - -from chalicelib.core import tenants, assist -import secrets def __generate_invitation_token(): @@ -182,7 +181,7 @@ def update(tenant_id, user_id, changes): return helper.dict_to_camel_case(cur.fetchone()) -def create_member(tenant_id, user_id, data): +def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks): admin = get(tenant_id=tenant_id, user_id=user_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} @@ -205,18 +204,25 @@ def create_member(tenant_id, user_id, data): new_member = create_new_member(email=data["email"], invitation_token=invitation_token, admin=data.get("admin", False), name=name) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) - helper.async_post(environ['email_basic'] % 'member_invitation', - { - "email": data["email"], - "invitationLink": new_member["invitationLink"], - "clientId": tenants.get_by_tenant_id(tenant_id)["name"], - "senderName": admin["name"] - }) + + # helper.async_post(config('email_basic') % 'member_invitation', + # { + # "email": data["email"], + # "invitationLink": new_member["invitationLink"], + # "clientId": tenants.get_by_tenant_id(tenant_id)["name"], + # "senderName": admin["name"] + # }) + background_tasks.add_task(email_helper.send_team_invitation, **{ + "recipient": data["email"], + "invitation_link": new_member["invitationLink"], + "client_id": tenants.get_by_tenant_id(tenant_id)["name"], + "sender_name": admin["name"] + }) return {"data": new_member} def __get_invitation_link(invitation_token): - return environ["SITE_URL"] + environ["invitation_link"] % invitation_token + return config("SITE_URL") + config("invitation_link") % invitation_token def allow_password_change(user_id, 
delta_min=10): @@ -282,12 +288,15 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return {"errors": ["unauthorized"]} - if user["superAdmin"]: - changes.pop("admin") + if editor_id == user_id_to_update: + if user["superAdmin"]: + changes.pop("admin") + elif user["admin"] != changes["admin"]: + return {"errors": ["cannot change your own role"]} keys = list(changes.keys()) for k in keys: - if k not in ALLOW_EDIT: + if k not in ALLOW_EDIT or changes[k] is None: changes.pop(k) keys = list(changes.keys()) @@ -441,7 +450,7 @@ def change_password(tenant_id, user_id, email, old_password, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]= assist.get_ice_servers() + c["iceServers"] = assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { @@ -469,7 +478,7 @@ def set_password_invitation(user_id, new_password): c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, stack_integrations=True) c["smtp"] = helper.has_smtp() - c["iceServers"]= assist.get_ice_servers() + c["iceServers"] = assist.get_ice_servers() return { 'jwt': r.pop('jwt'), 'data': { diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py index fff2d4e7e..653a2b513 100644 --- a/api/chalicelib/core/webhook.py +++ b/api/chalicelib/core/webhook.py @@ -76,6 +76,12 @@ def update(tenant_id, webhook_id, changes, replace_none=False): allow_update = ["name", "index", "authHeader", "endpoint"] with pg_client.PostgresClient() as cur: sub_query = [f"{helper.key_to_snake_case(k)} = %({k})s" for k in changes.keys() if k in allow_update] + print(cur.mogrify(f"""\ + UPDATE public.webhooks + SET {','.join(sub_query)} + WHERE webhook_id =%(id)s AND deleted_at ISNULL + RETURNING webhook_id AS integration_id, 
webhook_id AS id,*;""", + {"id": webhook_id, **changes})) cur.execute( cur.mogrify(f"""\ UPDATE public.webhooks @@ -114,7 +120,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", def add_edit(tenant_id, data, replace_none=None): - if "webhookId" in data: + if data.get("webhookId") is not None: return update(tenant_id=tenant_id, webhook_id=data["webhookId"], changes={"endpoint": data["endpoint"], "authHeader": None if "authHeader" not in data else data["authHeader"], diff --git a/api/chalicelib/core/weekly_report.py b/api/chalicelib/core/weekly_report.py index e0e6e0fa5..cb4fab769 100644 --- a/api/chalicelib/core/weekly_report.py +++ b/api/chalicelib/core/weekly_report.py @@ -1,6 +1,6 @@ from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ +from decouple import config from chalicelib.utils.helper import get_issue_title LOWEST_BAR_VALUE = 3 @@ -227,7 +227,7 @@ def cron(): if j["type"] in keep_types: keep.append(j) i["partition"] = keep - helper.async_post(environ['email_funnel'] % "weekly_report2", + helper.async_post(config('email_funnel') % "weekly_report2", {"email": p.pop("emails"), "data": { **p, diff --git a/api/chalicelib/utils/TimeUTC.py b/api/chalicelib/utils/TimeUTC.py index c95359a00..bac7a027f 100644 --- a/api/chalicelib/utils/TimeUTC.py +++ b/api/chalicelib/utils/TimeUTC.py @@ -1,6 +1,9 @@ -from datetime import datetime, timedelta from calendar import monthrange -import pytz +from datetime import datetime, timedelta + +import zoneinfo + +UTC_ZI = zoneinfo.ZoneInfo("UTC") class TimeUTC: @@ -9,20 +12,20 @@ class TimeUTC: MS_DAY = MS_HOUR * 24 MS_WEEK = MS_DAY * 7 MS_MONTH = MS_DAY * 30 - MS_MONTH_TRUE = monthrange(datetime.now(pytz.utc).astimezone(pytz.utc).year, - datetime.now(pytz.utc).astimezone(pytz.utc).month)[1] * MS_DAY + MS_MONTH_TRUE = monthrange(datetime.now(UTC_ZI).astimezone(UTC_ZI).year, + 
datetime.now(UTC_ZI).astimezone(UTC_ZI).month)[1] * MS_DAY RANGE_VALUE = None @staticmethod def midnight(delta_days=0): - return int((datetime.now(pytz.utc) + timedelta(delta_days)) \ + return int((datetime.now(UTC_ZI) + timedelta(delta_days)) \ .replace(hour=0, minute=0, second=0, microsecond=0) \ - .astimezone(pytz.utc).timestamp() * 1000) + .astimezone(UTC_ZI).timestamp() * 1000) @staticmethod def __now(delta_days=0, delta_minutes=0, delta_seconds=0): - return (datetime.now(pytz.utc) + timedelta(days=delta_days, minutes=delta_minutes, seconds=delta_seconds)) \ - .astimezone(pytz.utc) + return (datetime.now(UTC_ZI) + timedelta(days=delta_days, minutes=delta_minutes, seconds=delta_seconds)) \ + .astimezone(UTC_ZI) @staticmethod def now(delta_days=0, delta_minutes=0, delta_seconds=0): @@ -32,28 +35,28 @@ class TimeUTC: @staticmethod def month_start(delta_month=0): month = TimeUTC.__now().month + delta_month - return int(datetime.now(pytz.utc) \ + return int(datetime.now(UTC_ZI) \ .replace(year=TimeUTC.__now().year + ((-12 + month) // 12 if month % 12 <= 0 else month // 12), month=12 + month % 12 if month % 12 <= 0 else month % 12 if month > 12 else month, day=1, hour=0, minute=0, second=0, microsecond=0) \ - .astimezone(pytz.utc).timestamp() * 1000) + .astimezone(UTC_ZI).timestamp() * 1000) @staticmethod def year_start(delta_year=0): - return int(datetime.now(pytz.utc) \ + return int(datetime.now(UTC_ZI) \ .replace(year=TimeUTC.__now().year + delta_year, month=1, day=1, hour=0, minute=0, second=0, microsecond=0) \ - .astimezone(pytz.utc).timestamp() * 1000) + .astimezone(UTC_ZI).timestamp() * 1000) @staticmethod def custom(year=None, month=None, day=None, hour=None, minute=None): args = locals() - return int(datetime.now(pytz.utc) \ + return int(datetime.now(UTC_ZI) \ .replace(**{key: args[key] for key in args if args[key] is not None}, second=0, microsecond=0) \ - .astimezone(pytz.utc).timestamp() * 1000) + .astimezone(UTC_ZI).timestamp() * 1000) @staticmethod def 
future(delta_day, delta_hour, delta_minute, minutes_period=None, start=None): @@ -78,7 +81,7 @@ class TimeUTC: @staticmethod def from_ms_timestamp(ts): - return datetime.fromtimestamp(ts // 1000, pytz.utc) + return datetime.fromtimestamp(ts // 1000, UTC_ZI) @staticmethod def to_human_readable(ts, fmt='%Y-%m-%d %H:%M:%S UTC'): @@ -113,14 +116,14 @@ class TimeUTC: @staticmethod def get_utc_offset(): - return int((datetime.now(pytz.utc).now() - datetime.now(pytz.utc).replace(tzinfo=None)).total_seconds() * 1000) + return int((datetime.now(UTC_ZI).now() - datetime.now(UTC_ZI).replace(tzinfo=None)).total_seconds() * 1000) @staticmethod def trunc_day(timestamp): dt = TimeUTC.from_ms_timestamp(timestamp) return TimeUTC.datetime_to_timestamp(dt .replace(hour=0, minute=0, second=0, microsecond=0) - .astimezone(pytz.utc)) + .astimezone(UTC_ZI)) @staticmethod def trunc_week(timestamp): @@ -128,4 +131,4 @@ class TimeUTC: start = dt - timedelta(days=dt.weekday()) return TimeUTC.datetime_to_timestamp(start .replace(hour=0, minute=0, second=0, microsecond=0) - .astimezone(pytz.utc)) + .astimezone(UTC_ZI)) diff --git a/api/chalicelib/utils/captcha.py b/api/chalicelib/utils/captcha.py index f499a6da9..741031071 100644 --- a/api/chalicelib/utils/captcha.py +++ b/api/chalicelib/utils/captcha.py @@ -1,10 +1,10 @@ -from chalicelib.utils.helper import environ as env +from decouple import config import requests from chalicelib.utils import helper def __get_captcha_config(): - return env["captcha_server"], env["captcha_key"] + return config("captcha_server"), config("captcha_key") def is_valid(response): diff --git a/api/chalicelib/utils/email_handler.py b/api/chalicelib/utils/email_handler.py index f7a7fd61b..66b8a3afd 100644 --- a/api/chalicelib/utils/email_handler.py +++ b/api/chalicelib/utils/email_handler.py @@ -6,7 +6,7 @@ from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from chalicelib.utils import helper, smtp -from chalicelib.utils.helper import 
environ +from decouple import config def __get_subject(subject): @@ -16,7 +16,7 @@ def __get_subject(subject): def __get_html_from_file(source, formatting_variables): if formatting_variables is None: formatting_variables = {} - formatting_variables["frontend_url"] = environ["SITE_URL"] + formatting_variables["frontend_url"] = config("SITE_URL") with open(source, "r") as body: BODY_HTML = body.read() if formatting_variables is not None and len(formatting_variables.keys()) > 0: @@ -50,7 +50,7 @@ def send_html(BODY_HTML, SUBJECT, recipient, bcc=None): recipient = [recipient] msg = MIMEMultipart() msg['Subject'] = Header(__get_subject(SUBJECT), 'utf-8') - msg['From'] = environ["EMAIL_FROM"] + msg['From'] = config("EMAIL_FROM") msg['To'] = "" body = MIMEText(BODY_HTML.encode('utf-8'), 'html', "utf-8") msg.attach(body) @@ -75,7 +75,7 @@ def send_text(recipients, text, subject): with smtp.SMTPClient() as s: msg = MIMEMultipart() msg['Subject'] = Header(__get_subject(subject), 'utf-8') - msg['From'] = environ["EMAIL_FROM"] + msg['From'] = config("EMAIL_FROM") msg['To'] = ", ".join(recipients) body = MIMEText(text) msg.attach(body) diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 1a743a57c..a227a582b 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -6,17 +6,15 @@ import math import requests local_prefix = 'local-' -from os import environ, path - -import json +from decouple import config def get_version_number(): - return environ["version"] + return config("version") def get_stage_name(): - stage = environ["stage"] + stage = config("stage") return stage[len(local_prefix):] if stage.startswith(local_prefix) else stage @@ -33,7 +31,7 @@ def is_onprem(): def is_local(): - return environ["stage"].startswith(local_prefix) + return config("stage").startswith(local_prefix) def generate_salt(): @@ -135,16 +133,16 @@ def __sbool_to_bool(value): def allow_captcha(): - return environ.get("captcha_server") is not None 
and environ.get("captcha_key") is not None \ - and len(environ["captcha_server"]) > 0 and len(environ["captcha_key"]) > 0 + return config("captcha_server", default=None) is not None and config("captcha_key", default=None) is not None \ + and len(config("captcha_server")) > 0 and len(config("captcha_key")) > 0 def allow_sentry(): - return environ.get("sentryURL") is not None and len(environ["sentryURL"]) > 0 + return config("sentryURL", default=None) is not None and len(config("sentryURL")) > 0 def async_post(endpoint, data): - data["auth"] = environ["async_Token"] + data["auth"] = config("async_Token") try: requests.post(endpoint, timeout=1, json=data) except requests.exceptions.ReadTimeout: @@ -328,46 +326,9 @@ def __decimal_limit(value, limit): return value / factor -def is_free_open_source_edition(): - return __sbool_to_bool(environ.get("isFOS")) - - -def is_enterprise_edition(): - return __sbool_to_bool(environ.get("isEE")) - - -stag_config_file = f"chalicelib/.configs/{environ['stage']}.json" -if not path.isfile(stag_config_file): - print("!! stage config file not found, using .chalice/config.json only") -else: - print("!! stage config file found, merging with priority to .chalice/config.json") - with open(stag_config_file) as json_file: - config = json.load(json_file) - environ = {**config, **environ} - -if (is_free_open_source_edition() or is_enterprise_edition()) and environ.get("config_file"): - if not path.isfile(environ.get("config_file")): - print("!! 
config file not found, using default environment") - else: - with open(environ.get("config_file")) as json_file: - config = json.load(json_file) - environ = {**environ, **config} - - -def get_internal_project_id(project_id64): - if project_id64 < 0x10000000000000 or project_id64 >= 0x20000000000000: - return None - - project_id64 = (project_id64 - 0x10000000000000) * 4212451012670231 & 0xfffffffffffff - if project_id64 > 0xffffffff: - return None - project_id = int(project_id64) - return project_id - - def has_smtp(): - return environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0 + return config("EMAIL_HOST") is not None and len(config("EMAIL_HOST")) > 0 def get_edition(): - return "foss" if is_free_open_source_edition() else "ee" + return "ee" if "ee" in config("ENTERPRISE_BUILD", default="").lower() else "foss" diff --git a/api/chalicelib/utils/pg_client.py b/api/chalicelib/utils/pg_client.py index c54e514ec..8fb869367 100644 --- a/api/chalicelib/utils/pg_client.py +++ b/api/chalicelib/utils/pg_client.py @@ -1,12 +1,12 @@ import psycopg2 import psycopg2.extras -from chalicelib.utils.helper import environ +from decouple import config -PG_CONFIG = {"host": environ["pg_host"], - "database": environ["pg_dbname"], - "user": environ["pg_user"], - "password": environ["pg_password"], - "port": int(environ["pg_port"])} +PG_CONFIG = {"host": config("pg_host"), + "database": config("pg_dbname"), + "user": config("pg_user"), + "password": config("pg_password"), + "port": config("pg_port", cast=int)} from psycopg2 import pool from threading import Semaphore diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index 176a19fa2..67e1eafd2 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -1,24 +1,24 @@ from botocore.exceptions import ClientError -from chalicelib.utils.helper import environ +from decouple import config from datetime import datetime, timedelta import boto3 import botocore from botocore.client import Config 
-client = boto3.client('s3', endpoint_url=environ["S3_HOST"], - aws_access_key_id=environ["S3_KEY"], - aws_secret_access_key=environ["S3_SECRET"], +client = boto3.client('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), config=Config(signature_version='s3v4'), - region_name=environ["sessions_region"]) + region_name=config("sessions_region")) def exists(bucket, key): try: - boto3.resource('s3', endpoint_url=environ["S3_HOST"], - aws_access_key_id=environ["S3_KEY"], - aws_secret_access_key=environ["S3_SECRET"], + boto3.resource('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), config=Config(signature_version='s3v4'), - region_name=environ["sessions_region"]) \ + region_name=config("sessions_region")) \ .Object(bucket, key).load() except botocore.exceptions.ClientError as e: if e.response['Error']['Code'] == "404": @@ -73,21 +73,21 @@ def get_file(source_bucket, source_key): def rename(source_bucket, source_key, target_bucket, target_key): - s3 = boto3.resource('s3', endpoint_url=environ["S3_HOST"], - aws_access_key_id=environ["S3_KEY"], - aws_secret_access_key=environ["S3_SECRET"], + s3 = boto3.resource('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), config=Config(signature_version='s3v4'), - region_name=environ["sessions_region"]) + region_name=config("sessions_region")) s3.Object(target_bucket, target_key).copy_from(CopySource=f'{source_bucket}/{source_key}') s3.Object(source_bucket, source_key).delete() def schedule_for_deletion(bucket, key): - s3 = boto3.resource('s3', endpoint_url=environ["S3_HOST"], - aws_access_key_id=environ["S3_KEY"], - aws_secret_access_key=environ["S3_SECRET"], + s3 = boto3.resource('s3', endpoint_url=config("S3_HOST"), + aws_access_key_id=config("S3_KEY"), + aws_secret_access_key=config("S3_SECRET"), 
config=Config(signature_version='s3v4'), - region_name=environ["sessions_region"]) + region_name=config("sessions_region")) s3_object = s3.Object(bucket, key) s3_object.copy_from(CopySource={'Bucket': bucket, 'Key': key}, Expires=datetime.now() + timedelta(days=7), diff --git a/api/chalicelib/utils/s3urls.py b/api/chalicelib/utils/s3urls.py deleted file mode 100644 index bc0b39bea..000000000 --- a/api/chalicelib/utils/s3urls.py +++ /dev/null @@ -1,120 +0,0 @@ -import re -from urllib.parse import urlparse - - -def style(url): - """ Determine 'style' of a given S3 url - - >>> style("s3://my-bucket/my-key/") - 's3' - - >>> style("s3://user@my-bucket/my-key/") - 's3-credential' - - >>> style("https://my-bucket.s3.amazonaws.com/my-key/") - 'bucket-in-netloc' - - >>> style("https://s3.amazonaws.com/my-bucket/my-key/") - 'bucket-in-path' - """ - o = urlparse(url) - if o.scheme == 's3': - if '@' in o.netloc: - return 's3-credential' - else: - return 's3' - - if re.search(r'^s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc): - return 'bucket-in-path' - - if re.search(r'\.s3[.-](\w{2}-\w{4,9}-\d\.)?amazonaws\.com', o.netloc): - return 'bucket-in-netloc' - - raise ValueError(f'Unknown url style: {url}') - - -def build_url(url_type, bucket, key=None, region=None, credential_name=None): - """ Construct an S3 URL - - Args: - url_type: one of 's3', 's3-credential', 'bucket-in-path', 'bucket-in-netloc' - bucket: S3 bucket name - key: Key within bucket (optional) - region: S3 region name (optional) - credential_name: user/credential name to use in S3 scheme url (optional) - - Returns - (string) S3 URL - """ - if url_type == 's3': - credential = f'{credential_name}@' if credential_name else "" - return f's3://{credential}{bucket}/{key or ""}' - - if url_type == 'bucket-in-path': - return f'https://s3{"-" if region else ""}{region or ""}.amazonaws.com/{bucket}/{key}' - - if url_type == 'bucket-in-netloc': - return f'https://{bucket}.s3.amazonaws.com/{key}' - - raise 
ValueError(f'Invalid url_type: {url_type}') - - -def parse_s3_credential_url(url): - """ Parse S3 scheme url containing a user/credential name - - >>> parse_s3_url("s3://user@my-bucket/my-key") - {'bucket': 'my-bucket', 'key': 'my-key/', 'credential_name': 'user'} - """ - o = urlparse(url) - cred_name, bucket = o.netloc.split('@') - key = o.path if o.path[0] != '/' else o.path[1:] - return {'bucket': bucket, 'key': key, 'credential_name': cred_name} - - -def parse_s3_url(url): - """ Parse S3 scheme url - - >>> parse_s3_url("s3://my-bucket/my-key") - {'bucket': 'my-bucket', 'key': 'my-key/'} - """ - o = urlparse(url) - bucket = o.netloc - key = o.path if o.path[0] != '/' else o.path[1:] - return {'bucket': bucket, 'key': key} - - -def parse_bucket_in_path_url(url): - """ Parse url with bucket name path - - >>> parse_bucket_in_path_url("https://s3-eu-west-1.amazonaws.com/my-bucket/my-key/") - {'bucket': 'my-bucket', 'key': 'my-key/'} - """ - path = urlparse(url).path - bucket = path.split('/')[1] - key = '/'.join(path.split('/')[2:]) - return {'bucket': bucket, 'key': key} - - -def parse_bucket_in_netloc_url(url): - """ Parse url with bucket name in host/netloc - - >>> parse_bucket_in_netloc_url("https://my-bucket.s3.amazonaws.com/my-key/") - {'bucket': 'my-bucket', 'key': 'my-key/'} - """ - o = urlparse(url) - bucket = o.netloc.split('.')[0] - key = o.path if o.path[0] != '/' else o.path[1:] - return {'bucket': bucket, 'key': key} - - -def parse_url(url): - url_style = style(url) - - if url_style == 's3-credential': - return parse_s3_credential_url(url) - if url_style == 's3': - return parse_s3_url(url) - if url_style == 'bucket-in-path': - return parse_bucket_in_path_url(url) - if url_style == 'bucket-in-netloc': - return parse_bucket_in_netloc_url(url) diff --git a/api/chalicelib/utils/smtp.py b/api/chalicelib/utils/smtp.py index 90c95693b..3615ca71a 100644 --- a/api/chalicelib/utils/smtp.py +++ b/api/chalicelib/utils/smtp.py @@ -1,5 +1,5 @@ import smtplib -from 
chalicelib.utils.helper import environ +from decouple import config class EmptySMTP: @@ -11,26 +11,26 @@ class SMTPClient: server = None def __init__(self): - if environ["EMAIL_HOST"] is None or len(environ["EMAIL_HOST"]) == 0: + if config("EMAIL_HOST") is None or len(config("EMAIL_HOST")) == 0: return - elif environ["EMAIL_USE_SSL"].lower() == "false": - self.server = smtplib.SMTP(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"])) + elif config("EMAIL_USE_SSL").lower() == "false": + self.server = smtplib.SMTP(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT"))) else: - if len(environ["EMAIL_SSL_KEY"]) == 0 or len(environ["EMAIL_SSL_CERT"]) == 0: - self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"])) + if len(config("EMAIL_SSL_KEY")) == 0 or len(config("EMAIL_SSL_CERT")) == 0: + self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT"))) else: - self.server = smtplib.SMTP_SSL(host=environ["EMAIL_HOST"], port=int(environ["EMAIL_PORT"]), - keyfile=environ["EMAIL_SSL_KEY"], certfile=environ["EMAIL_SSL_CERT"]) + self.server = smtplib.SMTP_SSL(host=config("EMAIL_HOST"), port=int(config("EMAIL_PORT")), + keyfile=config("EMAIL_SSL_KEY"), certfile=config("EMAIL_SSL_CERT")) def __enter__(self): if self.server is None: return EmptySMTP() self.server.ehlo() - if environ["EMAIL_USE_SSL"].lower() == "false" and environ["EMAIL_USE_TLS"].lower() == "true": + if config("EMAIL_USE_SSL").lower() == "false" and config("EMAIL_USE_TLS").lower() == "true": self.server.starttls() # stmplib docs recommend calling ehlo() before & after starttls() self.server.ehlo() - self.server.login(user=environ["EMAIL_USER"], password=environ["EMAIL_PASSWORD"]) + self.server.login(user=config("EMAIL_USER"), password=config("EMAIL_PASSWORD")) return self.server def __exit__(self, *args): diff --git a/api/entrypoint.sh b/api/entrypoint.sh index 4a8c790c8..60fefb5c0 100755 --- a/api/entrypoint.sh +++ b/api/entrypoint.sh @@ 
-1,3 +1,2 @@ #!/bin/bash -python env_handler.py -chalice local --no-autoreload --host 0.0.0.0 --stage ${ENTERPRISE_BUILD} +uvicorn app:app --host 0.0.0.0 diff --git a/api/env_handler.py b/api/env_handler.py deleted file mode 100644 index d56dd17c8..000000000 --- a/api/env_handler.py +++ /dev/null @@ -1,13 +0,0 @@ -from os import environ -import json - -with open('.chalice/config.json') as json_file: - data = json.load(json_file) - stages = data.get("stages", {}) - for s in stages.keys(): - if environ.get("SITE_URL") is None or environ["SITE_URL"] == '': - environ["SITE_URL"] = environ.get("S3_HOST", "") - data["stages"][s]["environment_variables"] = {**stages[s].get("environment_variables", {}), **environ} -with open('.chalice/config.json', 'w') as outfile: - json.dump(data, outfile, indent=2, sort_keys=True) - print("override config.json") diff --git a/api/or_dependencies.py b/api/or_dependencies.py new file mode 100644 index 000000000..7eee72c49 --- /dev/null +++ b/api/or_dependencies.py @@ -0,0 +1,43 @@ +import json +from typing import Callable + +from fastapi.routing import APIRoute +from starlette import status +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import Response, JSONResponse + +import schemas + + +async def OR_context(request: Request) -> schemas.CurrentContext: + if hasattr(request.state, "currentContext"): + return request.state.currentContext + else: + raise Exception("currentContext not found") + + +class ORRoute(APIRoute): + def get_route_handler(self) -> Callable: + original_route_handler = super().get_route_handler() + + async def custom_route_handler(request: Request) -> Response: + try: + response: Response = await original_route_handler(request) + except HTTPException as e: + if e.status_code // 100 == 4: + return JSONResponse(content={"errors": [e.detail]}, status_code=e.status_code) + else: + raise e + + if isinstance(response, JSONResponse): + response: JSONResponse = 
response + body = json.loads(response.body.decode('utf8')) + if response.status_code == 200 and body is not None and body.get("errors") is not None: + if "not found" in body["errors"][0]: + response.status_code = status.HTTP_404_NOT_FOUND + else: + response.status_code = status.HTTP_400_BAD_REQUEST + return response + + return custom_route_handler diff --git a/api/requirements.txt b/api/requirements.txt index f211cec45..0a239790c 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -3,9 +3,13 @@ urllib3==1.26.6 boto3==1.16.1 pyjwt==1.7.1 psycopg2-binary==2.8.6 -pytz==2020.1 -sentry-sdk==0.19.1 elasticsearch==7.9.1 jira==2.0.0 -schedule==1.1.0 -croniter==1.0.12 \ No newline at end of file + + + +fastapi==0.70.1 +uvicorn[standard]==0.16.0 +python-decouple==3.5 +pydantic[email]==1.8.2 +apscheduler==3.8.1 \ No newline at end of file diff --git a/api/chalicelib/blueprints/app/__init__.py b/api/routers/__init__.py similarity index 100% rename from api/chalicelib/blueprints/app/__init__.py rename to api/routers/__init__.py diff --git a/api/chalicelib/blueprints/subs/__init__.py b/api/routers/app/__init__.py similarity index 100% rename from api/chalicelib/blueprints/subs/__init__.py rename to api/routers/app/__init__.py diff --git a/api/routers/app/v1_api.py b/api/routers/app/v1_api.py new file mode 100644 index 000000000..1dcc7eb7d --- /dev/null +++ b/api/routers/app/v1_api.py @@ -0,0 +1,120 @@ +from fastapi import Depends, Body + +import schemas +from chalicelib.core import sessions, events, jobs, projects +from chalicelib.utils.TimeUTC import TimeUTC +from or_dependencies import OR_context +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@app_apikey.get('/v1/{projectKey}/users/{userId}/sessions', tags=["api"]) +def get_user_sessions(projectKey: str, userId: str, start_date: int = None, end_date: int = None): + projectId = projects.get_internal_project_id(projectKey) + + return { + 'data': sessions.get_user_sessions( + 
project_id=projectId, + user_id=userId, + start_date=start_date, + end_date=end_date + ) + } + + +@app_apikey.get('/v1/{projectKey}/sessions/{sessionId}/events', tags=["api"]) +def get_session_events(projectKey: str, sessionId: int): + projectId = projects.get_internal_project_id(projectKey) + return { + 'data': events.get_by_sessionId2_pg( + project_id=projectId, + session_id=sessionId + ) + } + + +@app_apikey.get('/v1/{projectKey}/users/{userId}', tags=["api"]) +def get_user_details(projectKey: str, userId: str): + projectId = projects.get_internal_project_id(projectKey) + return { + 'data': sessions.get_session_user( + project_id=projectId, + user_id=userId + ) + } + + +@app_apikey.delete('/v1/{projectKey}/users/{userId}', tags=["api"]) +def schedule_to_delete_user_data(projectKey: str, userId: str): + projectId = projects.get_internal_project_id(projectKey) + data = {"action": "delete_user_data", + "reference_id": userId, + "description": f"Delete user sessions of userId = {userId}", + "start_at": TimeUTC.to_human_readable(TimeUTC.midnight(1))} + record = jobs.create(project_id=projectId, data=data) + return { + 'data': record + } + + +@app_apikey.get('/v1/{projectKey}/jobs', tags=["api"]) +def get_jobs(projectKey: str): + projectId = projects.get_internal_project_id(projectKey) + return { + 'data': jobs.get_all(project_id=projectId) + } + + +@app_apikey.get('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) +def get_job(projectKey: str, jobId: int): + return { + 'data': jobs.get(job_id=jobId) + } + + +@app_apikey.delete('/v1/{projectKey}/jobs/{jobId}', tags=["api"]) +def cancel_job(projectKey: str, jobId: int): + job = jobs.get(job_id=jobId) + job_not_found = len(job.keys()) == 0 + + if job_not_found: + return {"errors": ["Job not found."]} + if job["status"] == jobs.JobStatus.COMPLETED or job["status"] == jobs.JobStatus.CANCELLED: + return {"errors": ["The request job has already been canceled/completed."]} + + job["status"] = "cancelled" + return { + 'data': 
jobs.update(job_id=jobId, job=job) + } + + +@app_apikey.get('/v1/projects', tags=["api"]) +def get_projects(context: schemas.CurrentContext = Depends(OR_context)): + records = projects.get_projects(tenant_id=context.tenant_id) + for record in records: + del record['projectId'] + + return { + 'data': records + } + + +@app_apikey.get('/v1/projects/{projectKey}', tags=["api"]) +def get_project(projectKey: str, context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': projects.get_project_by_key(tenant_id=context.tenant_id, project_key=projectKey) + } + + +@app_apikey.post('/v1/projects', tags=["api"]) +def create_project(data: schemas.CreateProjectSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + record = projects.create( + tenant_id=context.tenant_id, + user_id=None, + data=data, + skip_authorization=True + ) + del record['data']['projectId'] + return record diff --git a/api/routers/base.py b/api/routers/base.py new file mode 100644 index 000000000..ff7fe165f --- /dev/null +++ b/api/routers/base.py @@ -0,0 +1,12 @@ +from fastapi import APIRouter, Depends + +from auth.auth_apikey import APIKeyAuth +from auth.auth_jwt import JWTAuth +from or_dependencies import ORRoute + + +def get_routers() -> (APIRouter, APIRouter, APIRouter): + public_app = APIRouter(route_class=ORRoute) + app = APIRouter(dependencies=[Depends(JWTAuth())], route_class=ORRoute) + app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth())], route_class=ORRoute) + return public_app, app, app_apikey diff --git a/api/routers/core.py b/api/routers/core.py new file mode 100644 index 000000000..f297003b5 --- /dev/null +++ b/api/routers/core.py @@ -0,0 +1,1097 @@ +from typing import Union + +from decouple import config +from fastapi import Depends, Body + +import schemas +from chalicelib.core import log_tool_rollbar, sourcemaps, events, sessions_assignments, projects, \ + sessions_metas, alerts, funnels, issues, integrations_manager, metadata, \ + 
log_tool_elasticsearch, log_tool_datadog, \ + log_tool_stackdriver, reset_password, sessions_favorite_viewed, \ + log_tool_cloudwatch, log_tool_sentry, log_tool_sumologic, log_tools, errors, sessions, \ + log_tool_newrelic, announcements, log_tool_bugsnag, weekly_report, integration_jira_cloud, integration_github, \ + assist, heatmaps, mobile, signup, tenants, errors_favorite_viewed, boarding, notifications, webhook, slack, users +from chalicelib.core.collaboration_slack import Slack +from chalicelib.utils import email_helper +from chalicelib.utils.TimeUTC import TimeUTC +from or_dependencies import OR_context +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@app.get('/{projectId}/sessions2/favorite', tags=["sessions"]) +def get_favorite_sessions(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': sessions.get_favorite_sessions(project_id=projectId, user_id=context.user_id, include_viewed=True) + } + + +@app.get('/{projectId}/sessions2/{sessionId}', tags=["sessions"]) +def get_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, user_id=context.user_id, + include_fav_viewed=True, group_metadata=True) + if data is None: + return {"errors": ["session not found"]} + + sessions_favorite_viewed.view_session(project_id=projectId, user_id=context.user_id, session_id=sessionId) + return { + 'data': data + } + + +@app.get('/{projectId}/sessions2/{sessionId}/favorite', tags=["sessions"]) +def add_remove_favorite_session2(projectId: int, sessionId: int, + context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": sessions_favorite_viewed.favorite_session(project_id=projectId, user_id=context.user_id, + session_id=sessionId)} + + +@app.get('/{projectId}/sessions2/{sessionId}/assign', tags=["sessions"]) +def assign_session(projectId: int, 
sessionId, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, + tenant_id=context.tenant_id, + user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions2/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"]) +def get_error_trace(projectId: int, sessionId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/sessions2/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"]) +def get_session_assignment(projectId: int, sessionId: int, issueId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId, + tenant_id=context.tenant_id, user_id=context.user_id) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.post('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +@app.put('/{projectId}/sessions2/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"]) +def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.comment(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, assignment_id=issueId, + user_id=context.user_id, message=data.message) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.get('/{projectId}/events/search', tags=["events"]) +def events_search(projectId: int, q: str, type: str = None, key: str = None, source: str = None, + context: schemas.CurrentContext = Depends(OR_context)): + if len(q) == 
0: + return {"data": []} + result = events.search_pg2(text=q, event_type=type, project_id=projectId, source=source, + key=key) + return result + + +@app.post('/{projectId}/sessions/search2', tags=["sessions"]) +def sessions_search2(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search2_pg(data.dict(), projectId, user_id=context.user_id) + return {'data': data} + + +@app.get('/{projectId}/sessions/filters', tags=["sessions"]) +def session_filter_values(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {'data': sessions_metas.get_key_values(projectId)} + + +@app.get('/{projectId}/sessions/filters/top', tags=["sessions"]) +def session_top_filter_values(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {'data': sessions_metas.get_top_key_values(projectId)} + + +@app.get('/{projectId}/sessions/filters/search', tags=["sessions"]) +def get_session_filters_meta(projectId: int, q: str, type: str, + context: schemas.CurrentContext = Depends(OR_context)): + meta_type = type + if len(meta_type) == 0: + return {"data": []} + if len(q) == 0: + return {"data": []} + return sessions_metas.search(project_id=projectId, meta_type=meta_type, text=q) + + +@app.post('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) +@app.put('/{projectId}/integrations/{integration}/notify/{integrationId}/{source}/{sourceId}', tags=["integrations"]) +def integration_notify(projectId: int, integration: str, integrationId: int, source: str, sourceId: str, + data: schemas.IntegrationNotificationSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + comment = None + if data.comment: + comment = data.comment + if integration == "slack": + args = {"tenant_id": context.tenant_id, + "user": context.email, "comment": comment, "project_id": projectId, + "integration_id": 
integrationId} + if source == "sessions": + return Slack.share_session(session_id=sourceId, **args) + elif source == "errors": + return Slack.share_error(error_id=sourceId, **args) + return {"data": None} + + +@app.get('/integrations/sentry', tags=["integrations"]) +def get_all_sentry(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sentry.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/sentry', tags=["integrations"]) +def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sentry.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/sentry', tags=["integrations"]) +@app.put('/{projectId}/integrations/sentry', tags=["integrations"]) +def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/sentry', tags=["integrations"]) +def delete_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sentry.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/{projectId}/integrations/sentry/events/{eventId}', tags=["integrations"]) +def proxy_sentry(projectId: int, eventId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sentry.proxy_get(tenant_id=context.tenant_id, project_id=projectId, event_id=eventId)} + + +@app.get('/integrations/datadog', tags=["integrations"]) +def get_all_datadog(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_datadog.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/datadog', tags=["integrations"]) +def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": 
log_tool_datadog.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/datadog', tags=["integrations"]) +@app.put('/{projectId}/integrations/datadog', tags=["integrations"]) +def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/datadog', tags=["integrations"]) +def delete_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_datadog.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/stackdriver', tags=["integrations"]) +def get_all_stackdriver(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_stackdriver.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/stackdriver', tags=["integrations"]) +def get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_stackdriver.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) +@app.put('/{projectId}/integrations/stackdriver', tags=["integrations"]) +def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"]) +def delete_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_stackdriver.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/newrelic', tags=["integrations"]) +def get_all_newrelic(context: schemas.CurrentContext = Depends(OR_context)): + 
return {"data": log_tool_newrelic.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/newrelic', tags=["integrations"]) +def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_newrelic.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) +@app.put('/{projectId}/integrations/newrelic', tags=["integrations"]) +def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/newrelic', tags=["integrations"]) +def delete_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_newrelic.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/rollbar', tags=["integrations"]) +def get_all_rollbar(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_rollbar.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/rollbar', tags=["integrations"]) +def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_rollbar.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) +@app.put('/{projectId}/integrations/rollbar', tags=["integrations"]) +def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/rollbar', tags=["integrations"]) +def delete_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": 
log_tool_rollbar.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.post('/integrations/bugsnag/list_projects', tags=["integrations"]) +def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorizationToken)} + + +@app.get('/integrations/bugsnag', tags=["integrations"]) +def get_all_bugsnag(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_bugsnag.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/bugsnag', tags=["integrations"]) +def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_bugsnag.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) +@app.put('/{projectId}/integrations/bugsnag', tags=["integrations"]) +def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"]) +def delete_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_bugsnag.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.post('/integrations/cloudwatch/list_groups', tags=["integrations"]) +def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, + aws_secret_access_key=data.awsSecretAccessKey, + region=data.region)} + + +@app.get('/integrations/cloudwatch', tags=["integrations"]) +def get_all_cloudwatch(context: schemas.CurrentContext = Depends(OR_context)): + return 
{"data": log_tool_cloudwatch.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/cloudwatch', tags=["integrations"]) +def get_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_cloudwatch.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) +@app.put('/{projectId}/integrations/cloudwatch', tags=["integrations"]) +def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"]) +def delete_cloudwatch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_cloudwatch.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/elasticsearch', tags=["integrations"]) +def get_all_elasticsearch(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_elasticsearch.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/elasticsearch', tags=["integrations"]) +def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_elasticsearch.get(project_id=projectId)} + + +@app.post('/integrations/elasticsearch/test', tags=["integrations"]) +def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, **data.dict())} + + +@app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) +@app.put('/{projectId}/integrations/elasticsearch', tags=["integrations"]) +def add_edit_elasticsearch(projectId: int, data: 
schemas.ElasticsearchSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"]) +def delete_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_elasticsearch.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/sumologic', tags=["integrations"]) +def get_all_sumologic(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sumologic.get_all(tenant_id=context.tenant_id)} + + +@app.get('/{projectId}/integrations/sumologic', tags=["integrations"]) +def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sumologic.get(project_id=projectId)} + + +@app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) +@app.put('/{projectId}/integrations/sumologic', tags=["integrations"]) +def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + + +@app.delete('/{projectId}/integrations/sumologic', tags=["integrations"]) +def delete_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": log_tool_sumologic.delete(tenant_id=context.tenant_id, project_id=projectId)} + + +@app.get('/integrations/issues', tags=["integrations"]) +def get_integration_status(context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return {"data": {}} + return {"data": integration.get_obfuscated()} + + 
+@app.post('/integrations/jira', tags=["integrations"]) +@app.put('/integrations/jira', tags=["integrations"]) +def add_edit_jira_cloud(data: schemas.JiraGithubSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, + tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + data.provider = integration_jira_cloud.PROVIDER + return {"data": integration.add_edit(data=data.dict())} + + +@app.post('/integrations/github', tags=["integrations"]) +@app.put('/integrations/github', tags=["integrations"]) +def add_edit_github(data: schemas.JiraGithubSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, + tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + data.provider = integration_github.PROVIDER + return {"data": integration.add_edit(data=data.dict())} + + +@app.delete('/integrations/issues', tags=["integrations"]) +def delete_default_issue_tracking_tool(context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + return {"data": integration.delete()} + + +@app.delete('/integrations/jira', tags=["integrations"]) +def delete_jira_cloud(context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tool=integration_jira_cloud.PROVIDER, + tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + return {"data": integration.delete()} + + +@app.delete('/integrations/github', tags=["integrations"]) +def delete_github(context: schemas.CurrentContext = Depends(OR_context)): + error, integration = 
integrations_manager.get_integration(tool=integration_github.PROVIDER, + tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + return {"data": integration.delete()} + + +@app.get('/integrations/issues/list_projects', tags=["integrations"]) +def get_all_issue_tracking_projects(context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + data = integration.issue_handler.get_projects() + if "errors" in data: + return data + return {"data": data} + + +@app.get('/integrations/issues/{integrationProjectId}', tags=["integrations"]) +def get_integration_metadata(integrationProjectId: int, context: schemas.CurrentContext = Depends(OR_context)): + error, integration = integrations_manager.get_integration(tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + data = integration.issue_handler.get_metas(integrationProjectId) + if "errors" in data.keys(): + return data + return {"data": data} + + +@app.get('/{projectId}/assignments', tags=["assignment"]) +def get_all_assignments(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.get_all(project_id=projectId, user_id=context.user_id) + return { + 'data': data + } + + +@app.post('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) +@app.put('/{projectId}/sessions2/{sessionId}/assign/projects/{integrationProjectId}', tags=["assignment"]) +def create_issue_assignment(projectId: int, sessionId: int, integrationProjectId, + data: schemas.AssignmentSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions_assignments.create_new_assignment(tenant_id=context.tenant_id, project_id=projectId, + session_id=sessionId, + creator_id=context.user_id, assignee=data.assignee, + 
description=data.description, title=data.title, + issue_type=data.issue_type, + integration_project_id=integrationProjectId) + if "errors" in data.keys(): + return data + return { + 'data': data + } + + +@app.get('/{projectId}/gdpr', tags=["projects", "gdpr"]) +def get_gdpr(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": projects.get_gdpr(project_id=projectId)} + + +@app.post('/{projectId}/gdpr', tags=["projects", "gdpr"]) +@app.put('/{projectId}/gdpr', tags=["projects", "gdpr"]) +def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": projects.edit_gdpr(project_id=projectId, gdpr=data.dict())} + + +@public_app.post('/password/reset-link', tags=["reset password"]) +@public_app.put('/password/reset-link', tags=["reset password"]) +def reset_password_handler(data: schemas.ForgetPasswordPayloadSchema = Body(...)): + if len(data.email) < 5: + return {"errors": ["please provide a valid email address"]} + return reset_password.reset(data) + + +@app.get('/{projectId}/metadata', tags=["metadata"]) +def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": metadata.get(project_id=projectId)} + + +@app.post('/{projectId}/metadata/list', tags=["metadata"]) +@app.put('/{projectId}/metadata/list', tags=["metadata"]) +def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list) + + +@app.post('/{projectId}/metadata', tags=["metadata"]) +@app.put('/{projectId}/metadata', tags=["metadata"]) +def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return metadata.add(tenant_id=context.tenant_id, project_id=projectId, 
new_name=data.key) + + +@app.post('/{projectId}/metadata/{index}', tags=["metadata"]) +@app.put('/{projectId}/metadata/{index}', tags=["metadata"]) +def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=int(index), + new_name=data.key) + + +@app.delete('/{projectId}/metadata/{index}', tags=["metadata"]) +def delete_metadata(projectId: int, index: int, context: schemas.CurrentContext = Depends(OR_context)): + return metadata.delete(tenant_id=context.tenant_id, project_id=projectId, index=index) + + +@app.get('/{projectId}/metadata/search', tags=["metadata"]) +def search_metadata(projectId: int, q: str, key: str, context: schemas.CurrentContext = Depends(OR_context)): + if len(q) == 0 and len(key) == 0: + return {"data": []} + if len(q) == 0: + return {"errors": ["please provide a value for search"]} + if len(key) == 0: + return {"errors": ["please provide a key for search"]} + return metadata.search(tenant_id=context.tenant_id, project_id=projectId, value=q, key=key) + + +@app.get('/{projectId}/integration/sources', tags=["integrations"]) +def search_integrations(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return log_tools.search(project_id=projectId) + + +@public_app.post('/async/email_assignment', tags=["async mail"]) +def async_send_signup_emails(data: schemas.EmailPayloadSchema = Body(...)): + if data.auth != config("async_Token"): + return {} + email_helper.send_assign_session(recipient=data.email, link=data.link, message=data.message) + + +# TODO: transform this to a background task when you find a way to run it without an attached request +@public_app.post('/async/funnel/weekly_report2', tags=["async mail"]) +def async_weekly_report(data: schemas.WeeklyReportPayloadSchema = Body(...)): + print("=========================> Sending weekly report") + if 
data.auth != config("async_Token"): + return {} + email_helper.weekly_report2(recipients=data.email, data=data.data) + + +# @public_app.post('/async/basic/member_invitation', tags=["async mail"]) +# def async_basic_emails(data: schemas.MemberInvitationPayloadSchema = Body(...)): +# if data.auth != config("async_Token"): +# return {} +# email_helper.send_team_invitation(recipient=data.email, invitation_link=data.invitation_link, +# client_id=data.client_id, sender_name=data.sender_name) + + +@app.get('/{projectId}/sample_rate', tags=["projects"]) +def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": projects.get_capture_status(project_id=projectId)} + + +@app.post('/{projectId}/sample_rate', tags=["projects"]) +@app.put('/{projectId}/sample_rate', tags=["projects"]) +def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())} + + +@app.get('/announcements', tags=["announcements"]) +def get_all_announcements(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": announcements.get_all(context.user_id)} + + +@app.get('/announcements/view', tags=["announcements"]) +def view_announcements(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": announcements.view(user_id=context.user_id)} + + +@app.post('/{projectId}/errors/merge', tags=["errors"]) +def errors_merge(projectId: int, data: schemas.ErrorIdsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.merge(error_ids=data.errors) + return data + + +@app.get('/show_banner', tags=["banner"]) +def get_show_banner(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": False} + + +@app.post('/{projectId}/alerts', tags=["alerts"]) +@app.put('/{projectId}/alerts', tags=["alerts"]) +def 
create_alert(projectId: int, data: schemas.AlertSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return alerts.create(projectId, data.dict()) + + +@app.get('/{projectId}/alerts', tags=["alerts"]) +def get_all_alerts(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": alerts.get_all(projectId)} + + +@app.get('/{projectId}/alerts/{alertId}', tags=["alerts"]) +def get_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": alerts.get(alertId)} + + +@app.post('/{projectId}/alerts/{alertId}', tags=["alerts"]) +@app.put('/{projectId}/alerts/{alertId}', tags=["alerts"]) +def update_alert(projectId: int, alertId: int, data: schemas.AlertSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return alerts.update(alertId, data.dict()) + + +@app.delete('/{projectId}/alerts/{alertId}', tags=["alerts"]) +def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = Depends(OR_context)): + return alerts.delete(projectId, alertId) + + +@app.post('/{projectId}/funnels', tags=["funnels"]) +@app.put('/{projectId}/funnels', tags=["funnels"]) +def add_funnel(projectId: int, data: schemas.FunnelSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return funnels.create(project_id=projectId, + user_id=context.user_id, + name=data.name, + filter=data.filter.dict(), + is_public=data.is_public) + + +@app.get('/{projectId}/funnels', tags=["funnels"]) +def get_funnels(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": funnels.get_by_user(project_id=projectId, + user_id=context.user_id, + range_value=None, + start_date=None, + end_date=None, + details=False)} + + +@app.get('/{projectId}/funnels/details', tags=["funnels"]) +def get_funnels_with_details(projectId: int, rangeValue: str = None, startDate: int = None, endDate: int = None, + context: 
schemas.CurrentContext = Depends(OR_context)): + return {"data": funnels.get_by_user(project_id=projectId, + user_id=context.user_id, + range_value=rangeValue, + start_date=startDate, + end_date=endDate, + details=True)} + + +@app.get('/{projectId}/funnels/issue_types', tags=["funnels"]) +def get_possible_issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": funnels.get_possible_issue_types(project_id=projectId)} + + +@app.get('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) +def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None, + endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): + return funnels.get_top_insights(funnel_id=funnelId, project_id=projectId, + range_value=rangeValue, + start_date=startDate, + end_date=endDate) + + +@app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) +@app.put('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) +def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, project_id=projectId, data=data.dict()) + + +@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) +def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDate: int = None, endDate: int = None, + context: schemas.CurrentContext = Depends(OR_context)): + return funnels.get_issues(funnel_id=funnelId, project_id=projectId, + range_value=rangeValue, + start_date=startDate, end_date=endDate) + + +@app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) +@app.put('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) +def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = 
Depends(OR_context)): + return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, project_id=projectId, data=data.dict())} + + +@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) +def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None, + endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, + range_value=rangeValue, + start_date=startDate, + end_date=endDate)} + + +@app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) +@app.put('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) +def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, + data=data.dict())} + + +@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) +def get_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, + context: schemas.CurrentContext = Depends(OR_context)): + issue = issues.get(project_id=projectId, issue_id=issueId) + return { + "data": {"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue, + start_date=startDate, + end_date=endDate), + "issue": issue}} + + +@app.post('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"]) +@app.put('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"]) +def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str, + data: schemas.FunnelSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = funnels.search_by_issue(project_id=projectId, user_id=context.user_id, issue_id=issueId, + 
funnel_id=funnelId, data=data.dict()) + if "errors" in data: + return data + if data.get("issue") is None: + data["issue"] = issues.get(project_id=projectId, issue_id=issueId) + return { + "data": data + } + + +@app.get('/{projectId}/funnels/{funnelId}', tags=["funnels"]) +def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = funnels.get(funnel_id=funnelId, project_id=projectId) + if data is None: + return {"errors": ["funnel not found"]} + return {"data": data} + + +@app.post('/{projectId}/funnels/{funnelId}', tags=["funnels"]) +@app.put('/{projectId}/funnels/{funnelId}', tags=["funnels"]) +def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return funnels.update(funnel_id=funnelId, + user_id=context.user_id, + name=data.name, + filter=data.filter.dict(), + is_public=data.is_public) + + +@app.delete('/{projectId}/funnels/{funnelId}', tags=["funnels"]) +def delete_filter(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)): + return funnels.delete(user_id=context.user_id, funnel_id=funnelId, project_id=projectId) + + +@app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"]) +@app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"]) +def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + project_id = projects.get_internal_project_id(projectKey) + if project_id is None: + return {"errors": ["Project not found."]} + + return {"data": sourcemaps.presign_upload_urls(project_id=project_id, urls=data.urls)} + + +@app.get('/config/weekly_report', tags=["weekly report config"]) +def get_weekly_report_config(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": weekly_report.get_config(user_id=context.user_id)} + + 
+@app.post('/config/weekly_report', tags=["weekly report config"]) +@app.put('/config/weekly_report', tags=["weekly report config"]) +def edit_weekly_report_config(data: schemas.WeeklyReportConfigSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": weekly_report.edit_config(user_id=context.user_id, weekly_report=data.weekly_report)} + + +@app.get('/{projectId}/issue_types', tags=["issues"]) +def issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": issues.get_all_types()} + + +@app.get('/issue_types', tags=["issues"]) +def all_issue_types(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": issues.get_all_types()} + + +@app.get('/{projectId}/assist/sessions', tags=["assist"]) +def sessions_live(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions(projectId) + return {'data': data} + + +@app.post('/{projectId}/assist/sessions', tags=["assist"]) +def sessions_live_search(projectId: int, data: schemas.AssistSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = assist.get_live_sessions(projectId, filters=data.filters) + return {'data': data} + + +@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"]) +def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} + + +@app.post('/{projectId}/mobile/{sessionId}/urls', tags=['mobile']) +def mobile_signe(projectId: int, sessionId: int, data: schemas.MobileSignPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": mobile.sign_keys(project_id=projectId, session_id=sessionId, keys=data.keys)} + + +@public_app.put('/signup', tags=['signup']) +@public_app.post('/signup', tags=['signup']) +def 
signup_handler(data: schemas.UserSignupSchema = Body(...)): + return signup.create_step1(data) + + +@app.get('/projects', tags=['projects']) +def get_projects(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": projects.get_projects(tenant_id=context.tenant_id, recording_state=True, gdpr=True, recorded=True, + stack_integrations=True, version=True)} + + +@app.post('/projects', tags=['projects']) +@app.put('/projects', tags=['projects']) +def create_project(data: schemas.CreateProjectSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return projects.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) + + +@app.post('/projects/{projectId}', tags=['projects']) +@app.put('/projects/{projectId}', tags=['projects']) +def edit_project(projectId: int, data: schemas.CreateProjectSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return projects.edit(tenant_id=context.tenant_id, user_id=context.user_id, data=data, project_id=projectId) + + +@app.delete('/projects/{projectId}', tags=['projects']) +def delete_project(projectId, context: schemas.CurrentContext = Depends(OR_context)): + return projects.delete(tenant_id=context.tenant_id, user_id=context.user_id, project_id=projectId) + + +@app.get('/client', tags=['projects']) +def get_client(context: schemas.CurrentContext = Depends(OR_context)): + r = tenants.get_by_tenant_id(context.tenant_id) + if r is not None: + r.pop("createdAt") + r["projects"] = projects.get_projects(tenant_id=context.tenant_id, recording_state=True, recorded=True, + stack_integrations=True, version=True) + return { + 'data': r + } + + +@app.get('/client/new_api_key', tags=['client']) +def generate_new_tenant_token(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': tenants.generate_new_api_key(context.tenant_id) + } + + +@app.put('/client', tags=['client']) +@app.post('/client', tags=['client']) +def edit_client(data: 
schemas.UpdateTenantSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return tenants.update(tenant_id=context.tenant_id, user_id=context.user_id, data=data) + + +@app.post('/{projectId}/errors/search', tags=['errors']) +def errors_search(projectId: int, status: str = "ALL", favorite: Union[str, bool] = False, + data: schemas.SearchErrorsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + if isinstance(favorite, str): + favorite = True if len(favorite) == 0 else False + return errors.search(data.dict(), projectId, user_id=context.user_id, status=status, + favorite_only=favorite) + + +@app.get('/{projectId}/errors/stats', tags=['errors']) +def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, + context: schemas.CurrentContext = Depends(OR_context)): + return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) + + +@app.get('/{projectId}/errors/{errorId}', tags=['errors']) +def errors_get_details(projectId: int, errorId: str, density24: int = 24, density30: int = 30, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"density24": density24, "density30": density30}) + if data.get("data") is not None: + errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context.user_id, error_id=errorId) + return data + + +@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) +def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), density: int = 7, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, + **{"startDate": startDate, "endDate": endDate, "density": density}) + return data + + +@app.get('/{projectId}/errors/{errorId}/sourcemaps', 
tags=['errors']) +def errors_get_details_sourcemaps(projectId: int, errorId: str, + context: schemas.CurrentContext = Depends(OR_context)): + data = errors.get_trace(project_id=projectId, error_id=errorId) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"]) +def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), + endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)): + if action == "favorite": + return errors_favorite_viewed.favorite_error(project_id=projectId, user_id=context.user_id, error_id=errorId) + elif action == "sessions": + start_date = startDate + end_date = endDate + return { + "data": errors.get_sessions(project_id=projectId, user_id=context.user_id, error_id=errorId, + start_date=start_date, end_date=end_date)} + elif action in list(errors.ACTION_STATE.keys()): + return errors.change_state(project_id=projectId, user_id=context.user_id, error_id=errorId, action=action) + else: + return {"errors": ["undefined action"]} + + +@public_app.post('/async/alerts/notifications/{step}', tags=["async", "alerts"]) +@public_app.put('/async/alerts/notifications/{step}', tags=["async", "alerts"]) +def send_alerts_notification_async(step: str, data: schemas.AlertNotificationSchema = Body(...)): + if data.auth != config("async_Token"): + return {"errors": ["missing auth"]} + if step == "slack": + slack.send_batch(notifications_list=data.notifications) + elif step == "email": + alerts.send_by_email_batch(notifications_list=data.notifications) + elif step == "webhook": + webhook.trigger_batch(data_list=data.notifications) + + +@app.get('/notifications', tags=['notifications']) +def get_notifications(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": notifications.get_all(tenant_id=context.tenant_id, user_id=context.user_id)} + + +@app.get('/notifications/{notificationId}/view', 
tags=['notifications']) +def view_notifications(notificationId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context.user_id)} + + +@app.post('/notifications/view', tags=['notifications']) +@app.put('/notifications/view', tags=['notifications']) +def batch_view_notifications(data: schemas.NotificationsViewSchema, + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": notifications.view_notification(notification_ids=data.ids, + startTimestamp=data.startTimestamp, + endTimestamp=data.endTimestamp, + user_id=context.user_id, + tenant_id=context.tenant_id)} + + +@public_app.post('/notifications', tags=['notifications']) +@public_app.put('/notifications', tags=['notifications']) +def create_notifications(data: schemas.CreateNotificationSchema): + if data.token != config("async_Token"): + return {"errors": ["missing token"]} + return notifications.create(data.notifications) + + +@app.get('/boarding', tags=['boarding']) +def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": boarding.get_state(tenant_id=context.tenant_id)} + + +@app.get('/boarding/installing', tags=['boarding']) +def get_boarding_state_installing(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": boarding.get_state_installing(tenant_id=context.tenant_id)} + + +@app.get('/boarding/identify-users', tags=["boarding"]) +def get_boarding_state_identify_users(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": boarding.get_state_identify_users(tenant_id=context.tenant_id)} + + +@app.get('/boarding/manage-users', tags=["boarding"]) +def get_boarding_state_manage_users(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": boarding.get_state_manage_users(tenant_id=context.tenant_id)} + + +@app.get('/boarding/integrations', tags=["boarding"]) +def 
get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": boarding.get_state_integrations(tenant_id=context.tenant_id)} + + +@app.get('/integrations/slack/channels', tags=["integrations"]) +def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type='slack')} + + +@app.get('/integrations/slack/{integrationId}', tags=["integrations"]) +def get_slack_webhook(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId)} + + +@app.delete('/integrations/slack/{integrationId}', tags=["integrations"]) +def delete_slack_integration(integrationId: int, context: schemas.CurrentContext = Depends(OR_context)): + return webhook.delete(context.tenant_id, integrationId) + + +@app.post('/webhooks', tags=["webhooks"]) +@app.put('/webhooks', tags=["webhooks"]) +def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)} + + +@app.get('/webhooks', tags=["webhooks"]) +def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": webhook.get_by_tenant(tenant_id=context.tenant_id, replace_none=True)} + + +@app.delete('/webhooks/{webhookId}', tags=["webhooks"]) +def delete_webhook(webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): + return {"data": webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)} + + +@app.get('/client/members', tags=["client"]) +def get_members(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": users.get_members(tenant_id=context.tenant_id)} + + +@app.get('/client/members/{memberId}/reset', tags=["client"]) +def reset_reinvite_member(memberId: int, context: 
schemas.CurrentContext = Depends(OR_context)): + return users.reset_member(tenant_id=context.tenant_id, editor_id=context.user_id, user_id_to_update=memberId) + + +@app.delete('/client/members/{memberId}', tags=["client"]) +def delete_member(memberId: int, context: schemas.CurrentContext = Depends(OR_context)): + return users.delete_member(tenant_id=context.tenant_id, user_id=context.user_id, id_to_delete=memberId) + + +@app.get('/account/new_api_key', tags=["account"]) +def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": users.generate_new_api_key(user_id=context.user_id)} + + +@app.post('/account', tags=["account"]) +@app.put('/account', tags=["account"]) +def edit_account(data: schemas.EditUserSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return users.edit(tenant_id=context.tenant_id, user_id_to_update=context.user_id, changes=data.dict(), + editor_id=context.user_id) + + +@app.post('/account/password', tags=["account"]) +@app.put('/account/password', tags=["account"]) +def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return users.change_password(email=context.email, old_password=data.old_password, + new_password=data.new_password, tenant_id=context.tenant_id, + user_id=context.user_id) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py new file mode 100644 index 000000000..bf8a66092 --- /dev/null +++ b/api/routers/core_dynamic.py @@ -0,0 +1,218 @@ +from typing import Optional + +from decouple import config +from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from starlette.responses import RedirectResponse + +import schemas +from chalicelib.core import assist +from chalicelib.core import integrations_manager +from chalicelib.core import sessions +from chalicelib.core import tenants, users, metadata, projects, license, alerts +from chalicelib.core 
import webhook
from chalicelib.core.collaboration_slack import Slack
from chalicelib.utils import captcha
from chalicelib.utils import helper
from or_dependencies import OR_context
from routers.base import get_routers

public_app, app, app_apikey = get_routers()


@public_app.get('/signup', tags=['signup'])
def get_all_signup():
    """Unauthenticated probe used by the signup page: tells the UI whether any
    tenant already exists and which edition/SSO options are available."""
    return {"data": {"tenants": tenants.tenants_exists(),
                     "sso": None,
                     "ssoProvider": None,
                     "edition": helper.get_edition()}}


@public_app.post('/login', tags=["authentication"])
def login(data: schemas.UserLoginSchema = Body(...)):
    """Authenticate a user and return the JWT plus the user/client bootstrap payload.

    Raises HTTP 401 on captcha failure or bad credentials.
    """
    if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid captcha."
        )

    r = users.authenticate(data.email, data.password, for_plugin=False)
    if r is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="You’ve entered invalid Email or Password."
        )

    tenant_id = r.pop("tenantId")

    # Open-source edition: no seat/project limits (-1 == unlimited).
    r["limits"] = {
        "teamMember": -1,
        "projects": -1,
        "metadata": metadata.get_remaining_metadata_with_count(tenant_id)}

    c = tenants.get_by_tenant_id(tenant_id)
    c.pop("createdAt")
    c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True,
                                          stack_integrations=True, version=True)
    c["smtp"] = helper.has_smtp()
    # BUGFIX: this was `c["iceServers"]: assist.get_ice_servers()` — an annotated
    # expression, not an assignment — so the key was silently dropped from the
    # response. Assign it, matching what /account returns below.
    c["iceServers"] = assist.get_ice_servers()
    return {
        'jwt': r.pop('jwt'),
        'data': {
            "user": r,
            "client": c
        }
    }


@app.get('/account', tags=['accounts'])
def get_account(context: schemas.CurrentContext = Depends(OR_context)):
    """Return the authenticated user's profile merged with limits, license
    status, SMTP availability and assist ICE servers."""
    r = users.get(tenant_id=context.tenant_id, user_id=context.user_id)
    return {
        'data': {
            **r,
            "limits": {
                "teamMember": -1,
                "projects": -1,
                "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id)
            },
            **license.get_status(context.tenant_id),
            "smtp": helper.has_smtp(),
            "iceServers": assist.get_ice_servers()
        }
    }
+@app.get('/projects/limit', tags=['projects']) +def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": { + "current": projects.count_by_tenant(tenant_id=context.tenant_id), + "remaining": -1 + }} + + +@app.get('/projects/{projectId}', tags=['projects']) +def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True, + include_gdpr=True) + if data is None: + return {"errors": ["project not found"]} + return {"data": data} + + +@app.put('/integrations/slack', tags=['integrations']) +@app.post('/integrations/slack', tags=['integrations']) +def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)): + n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name) + if n is None: + return { + "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] + } + return {"data": n} + + +@app.put('/integrations/slack/{integrationId}', tags=['integrations']) +@app.post('/integrations/slack/{integrationId}', tags=['integrations']) +def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + if len(data.url) > 0: + old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId) + if old["endpoint"] != data.url: + if not Slack.say_hello(data.url): + return { + "errors": [ + "We couldn't send you a test message on your Slack channel. 
Please verify your webhook url."] + } + return {"data": webhook.update(tenant_id=context.tenant_id, webhook_id=integrationId, + changes={"name": data.name, "endpoint": data.url})} + + +# this endpoint supports both jira & github based on `provider` attribute +@app.post('/integrations/issues', tags=["integrations"]) +def add_edit_jira_cloud_github(data: schemas.JiraGithubSchema, + context: schemas.CurrentContext = Depends(OR_context)): + provider = data.provider.upper() + error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + return {"data": integration.add_edit(data=data.dict())} + + +@app.post('/client/members', tags=["client"]) +@app.put('/client/members', tags=["client"]) +def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), + background_tasks=background_tasks) + + +@public_app.get('/users/invitation', tags=['users']) +def process_invitation_link(token: str): + if token is None or len(token) < 64: + return {"errors": ["please provide a valid invitation"]} + user = users.get_by_invitation_token(token) + if user is None: + return {"errors": ["invitation not found"]} + if user["expiredInvitation"]: + return {"errors": ["expired invitation, please ask your admin to send a new one"]} + if user["expiredChange"] is not None and not user["expiredChange"] \ + and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: + pass_token = user["changePwdToken"] + else: + pass_token = users.allow_password_change(user_id=user["userId"]) + return RedirectResponse(url=config("SITE_URL") + config("change_password_link") % (token, pass_token)) + + +@public_app.post('/password/reset', tags=["users"]) +@public_app.put('/password/reset', tags=["users"]) +def 
change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): + if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: + return {"errors": ["please provide a valid invitation & pass"]} + user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase) + if user is None: + return {"errors": ["invitation not found"]} + if user["expiredChange"]: + return {"errors": ["expired change, please re-use the invitation link"]} + + return users.set_password_invitation(new_password=data.password, user_id=user["userId"]) + + +@app.put('/client/members/{memberId}', tags=["client"]) +@app.post('/client/members/{memberId}', tags=["client"]) +def edit_member(memberId: int, data: schemas.EditMemberSchema, + context: schemas.CurrentContext = Depends(OR_context)): + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + user_id_to_update=memberId) + + +@app.get('/metadata/session_search', tags=["metadata"]) +def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, + context: schemas.CurrentContext = Depends(OR_context)): + if key is None or value is None or len(value) == 0 and len(key) == 0: + return {"errors": ["please provide a key&value for search"]} + if len(value) == 0: + return {"errors": ["please provide a value for search"]} + if len(key) == 0: + return {"errors": ["please provide a key for search"]} + return { + "data": sessions.search_by_metadata(tenant_id=context.tenant_id, user_id=context.user_id, m_value=value, + m_key=key, project_id=projectId)} + + +@app.get('/plans', tags=["plan"]) +def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": license.get_status(context.tenant_id) + } + + +@public_app.post('/alerts/notifications', tags=["alerts"]) +@public_app.put('/alerts/notifications', tags=["alerts"]) +def send_alerts_notifications(background_tasks: BackgroundTasks, data: 
schemas.AlertNotificationSchema = Body(...)): + # TODO: validate token + return {"data": alerts.process_notifications(data.notifications, background_tasks=background_tasks)} + + +@public_app.get('/general_stats', tags=["private"], include_in_schema=False) +def get_general_stats(): + return {"data": {"sessions:": sessions.count_all()}} diff --git a/ee/api/chalicelib/blueprints/__init__.py b/api/routers/crons/__init__.py similarity index 100% rename from ee/api/chalicelib/blueprints/__init__.py rename to api/routers/crons/__init__.py diff --git a/api/routers/crons/core_crons.py b/api/routers/crons/core_crons.py new file mode 100644 index 000000000..0360bcd67 --- /dev/null +++ b/api/routers/crons/core_crons.py @@ -0,0 +1,15 @@ +from chalicelib.core import weekly_report, jobs + + +async def run_scheduled_jobs() -> None: + jobs.execute_jobs() + + +async def weekly_report2() -> None: + weekly_report.cron() + + +cron_jobs = [ + {"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20}, + {"func": weekly_report2, "trigger": "cron", "day_of_week": "mon", "hour": 5} +] diff --git a/api/routers/crons/core_dynamic_crons.py b/api/routers/crons/core_dynamic_crons.py new file mode 100644 index 000000000..78d91856d --- /dev/null +++ b/api/routers/crons/core_dynamic_crons.py @@ -0,0 +1,10 @@ +from chalicelib.core import telemetry + + +def telemetry_cron() -> None: + telemetry.compute() + + +cron_jobs = [ + {"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"} +] diff --git a/ee/api/chalicelib/blueprints/subs/__init__.py b/api/routers/subs/__init__.py similarity index 100% rename from ee/api/chalicelib/blueprints/subs/__init__.py rename to api/routers/subs/__init__.py diff --git a/api/routers/subs/dashboard.py b/api/routers/subs/dashboard.py new file mode 100644 index 000000000..169893693 --- /dev/null +++ b/api/routers/subs/dashboard.py @@ -0,0 +1,346 @@ +from fastapi import Body + +import schemas +from chalicelib.core import dashboard 
from chalicelib.core import metadata
from chalicelib.utils import helper
from routers.base import get_routers

public_app, app, app_apikey = get_routers()


@app.get('/{projectId}/dashboard/metadata', tags=["dashboard", "metrics"])
def get_metadata_map(projectId: int):
    """List the project's metadata keys mapped to their column aliases (metadata1..metadataN)."""
    metamap = [{"name": m["key"], "key": f"metadata{m['index']}"}
               for m in metadata.get(project_id=projectId)]
    return {"data": metamap}


@app.post('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions', tags=["dashboard", "metrics"])
def get_dashboard_processed_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Processed-sessions widget."""
    result = dashboard.get_processed_sessions(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors', tags=["dashboard", "metrics"])
def get_dashboard_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Errors widget."""
    result = dashboard.get_errors(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/errors_trend', tags=["dashboard", "metrics"])
def get_dashboard_errors_trend(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Errors-trend widget."""
    result = dashboard.get_errors_trend(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/application_activity', tags=["dashboard", "metrics"])
def get_dashboard_application_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Application-activity widget."""
    result = dashboard.get_application_activity(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/page_metrics', tags=["dashboard", "metrics"])
def get_dashboard_page_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Page-metrics widget."""
    result = dashboard.get_page_metrics(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/user_activity', tags=["dashboard", "metrics"])
def get_dashboard_user_activity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """User-activity widget."""
    result = dashboard.get_user_activity(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/performance', tags=["dashboard", "metrics"])
def get_dashboard_performance(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Performance widget."""
    result = dashboard.get_performance(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_images', tags=["dashboard", "metrics"])
def get_dashboard_slowest_images(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Slowest-images widget."""
    result = dashboard.get_slowest_images(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/missing_resources', tags=["dashboard", "metrics"])
def get_performance_sessions(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Missing-resources trend widget."""
    result = dashboard.get_missing_resources_trend(project_id=projectId, **data.dict())
    return {"data": result}


@app.post('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/network', tags=["dashboard", "metrics"])
def get_network_widget(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Network widget."""
    result = dashboard.get_network(project_id=projectId, **data.dict())
    return {"data": result}


@app.get('/{projectId}/dashboard/{widget}/search', tags=["dashboard", "metrics"])
def get_dashboard_autocomplete(projectId: int, widget: str, q: str, type: str = "", platform: str = None,
                               key: str = ""):
    """Autocomplete for widget filters; the search mode depends on the widget name."""
    if not q:
        return {"data": []}
    q = '^' + q

    page_widgets = ('pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render',
                    'impacted_sessions_by_slow_pages', 'pages_response_time')
    if widget == 'performance':
        hits = dashboard.search(q, type, project_id=projectId,
                                platform=platform, performance=True)
    elif widget in page_widgets:
        hits = dashboard.search(q, type, project_id=projectId,
                                platform=platform, pages_only=True)
    elif widget == 'resources_loading_time':
        hits = dashboard.search(q, type, project_id=projectId,
                                platform=platform, performance=False)
    elif widget in ('time_between_events', 'events'):
        hits = dashboard.search(q, type, project_id=projectId,
                                platform=platform, performance=False, events_only=True)
    elif widget == 'metadata':
        # Metadata search ignores `type` and filters by the metadata key instead.
        hits = dashboard.search(q, None, project_id=projectId,
                                platform=platform, metadata=True, key=key)
    else:
        return {"errors": [f"unsupported widget: {widget}"]}
    return {'data': hits}


# 1
@app.post('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/slowest_resources', tags=["dashboard", "metrics"])
def get_dashboard_slowest_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Slowest-resources widget."""
    result = dashboard.get_slowest_resources(project_id=projectId, **data.dict())
    return {"data": result}


# 2
@app.post('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/resources_loading_time', tags=["dashboard", "metrics"])
def get_dashboard_resources(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Resources-loading-time widget."""
    result = dashboard.get_resources_loading_time(project_id=projectId, **data.dict())
    return {"data": result}


# 3
@app.post('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_dom_buildtime', tags=["dashboard", "metrics"])
def get_dashboard_pages_dom(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Pages DOM-build-time widget."""
    result = dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict())
    return {"data": result}


# 4
@app.post('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/busiest_time_of_day', tags=["dashboard", "metrics"])
def get_dashboard_busiest_time_of_day(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Busiest-time-of-day widget."""
    result = dashboard.get_busiest_time_of_day(project_id=projectId, **data.dict())
    return {"data": result}


# 5
@app.post('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/sessions_location', tags=["dashboard", "metrics"])
def get_dashboard_sessions_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Sessions-location widget."""
    result = dashboard.get_sessions_location(project_id=projectId, **data.dict())
    return {"data": result}


# 6
@app.post('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/speed_location', tags=["dashboard", "metrics"])
def get_dashboard_speed_location(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Speed-index-by-location widget."""
    result = dashboard.get_speed_index_location(project_id=projectId, **data.dict())
    return {"data": result}


# 7
@app.post('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Pages-response-time widget."""
    result = dashboard.get_pages_response_time(project_id=projectId, **data.dict())
    return {"data": result}


# 8
@app.post('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/pages_response_time_distribution', tags=["dashboard", "metrics"])
def get_dashboard_pages_response_time_distribution(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Pages-response-time distribution widget."""
    result = dashboard.get_pages_response_time_distribution(project_id=projectId, **data.dict())
    return {"data": result}


# 9
@app.post('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/top_metrics', tags=["dashboard", "metrics"])
def get_dashboard_top_metrics(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Top-metrics widget."""
    result = dashboard.get_top_metrics(project_id=projectId, **data.dict())
    return {"data": result}


# 10
@app.post('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/time_to_render', tags=["dashboard", "metrics"])
def get_dashboard_time_to_render(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Time-to-render widget."""
    result = dashboard.get_time_to_render(project_id=projectId, **data.dict())
    return {"data": result}


# 11
@app.post('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/impacted_sessions_by_slow_pages', tags=["dashboard", "metrics"])
def get_dashboard_impacted_sessions_by_slow_pages(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Sessions impacted by slow pages widget."""
    result = dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **data.dict())
    return {"data": result}


# 12
@app.post('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/memory_consumption', tags=["dashboard", "metrics"])
def get_dashboard_memory_consumption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Memory-consumption widget."""
    result = dashboard.get_memory_consumption(project_id=projectId, **data.dict())
    return {"data": result}


# 12.1
@app.post('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
@app.get('/{projectId}/dashboard/fps', tags=["dashboard", "metrics"])
def get_dashboard_avg_fps(projectId: int, data: schemas.MetricPayloadSchema = Body(...)):
    """Average-FPS widget."""
    result = dashboard.get_avg_fps(project_id=projectId, **data.dict())
    return {"data": result}


# 12.2
+@app.post('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/cpu', tags=["dashboard", "metrics"]) +def get_dashboard_avg_cpu(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_avg_cpu(project_id=projectId, **data.dict())} + + +# 13 +@app.post('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/crashes', tags=["dashboard", "metrics"]) +def get_dashboard_crashes(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_crashes(project_id=projectId, **data.dict())} + + +# 14 +@app.post('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_domains_errors(project_id=projectId, **data.dict())} + + +# 14.1 +@app.post('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors_4xx', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **data.dict())} + + +# 14.2 +@app.post('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/domains_errors_5xx', tags=["dashboard", "metrics"]) +def get_dashboard_domains_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **data.dict())} + + +# 15 +@app.post('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/slowest_domains', tags=["dashboard", "metrics"]) +def get_dashboard_slowest_domains(projectId: 
int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_slowest_domains(project_id=projectId, **data.dict())} + + +# 16 +@app.post('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors_per_domains', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_domains(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_errors_per_domains(project_id=projectId, **data.dict())} + + +# 17 +@app.post('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/sessions_per_browser', tags=["dashboard", "metrics"]) +def get_dashboard_sessions_per_browser(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **data.dict())} + + +# 18 +@app.post('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_calls_errors(project_id=projectId, **data.dict())} + + +# 18.1 +@app.post('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors_4xx', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors_4xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **data.dict())} + + +# 18.2 +@app.post('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/calls_errors_5xx', tags=["dashboard", "metrics"]) +def get_dashboard_calls_errors_5xx(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **data.dict())} + + +# 19 
+@app.post('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/errors_per_type', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_errors_per_type(project_id=projectId, **data.dict())} + + +# 20 +@app.post('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_by_party', tags=["dashboard", "metrics"]) +def get_dashboard_resources_by_party(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_resources_by_party(project_id=projectId, **data.dict())} + + +# 21 +@app.post('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resource_type_vs_response_end', tags=["dashboard", "metrics"]) +def get_dashboard_errors_per_resource_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **data.dict())} + + +# 22 +@app.post('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_vs_visually_complete', tags=["dashboard", "metrics"]) +def get_dashboard_resources_vs_visually_complete(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **data.dict())} + + +# 23 +@app.post('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/impacted_sessions_by_js_errors', tags=["dashboard", "metrics"]) +def get_dashboard_impacted_sessions_by_js_errors(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **data.dict())} + + +# 24 
+@app.post('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/resources_count_by_type', tags=["dashboard", "metrics"]) +def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **data.dict())} + + +# # 25 +# @app.post('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"]) +# @app.get('/{projectId}/dashboard/time_between_events', tags=["dashboard", "metrics"]) +# def get_dashboard_resources_count_by_type(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): +# return {"errors": ["please choose 2 events"]} + + +@app.post('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"]) +@app.get('/{projectId}/dashboard/overview', tags=["dashboard", "metrics"]) +def get_dashboard_group(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": [ + *helper.explode_widget(key="count_sessions", + data=dashboard.get_processed_sessions(project_id=projectId, **data.dict())), + *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **data.dict()), + "chart": dashboard.get_performance(project_id=projectId, **data.dict()) + .get("chart", [])}), + *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **data.dict())), + *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **data.dict())), + *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **data.dict()), + key="avg_pages_dom_buildtime"), + *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **data.dict()), + key="avg_pages_response_time"), + *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **data.dict())), + *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **data.dict()), + 
key="avg_time_to_render"), + *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **data.dict())), + *helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **data.dict())), + *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **data.dict())), + ]} diff --git a/api/routers/subs/insights.py b/api/routers/subs/insights.py new file mode 100644 index 000000000..cce4917d4 --- /dev/null +++ b/api/routers/subs/insights.py @@ -0,0 +1,108 @@ +from fastapi import Body + +import schemas +from chalicelib.core import insights +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@app.post('/{projectId}/insights/journey', tags=["insights"]) +@app.get('/{projectId}/insights/journey', tags=["insights"]) +def get_insights_journey(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.journey(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_acquisition', tags=["insights"]) +@app.get('/{projectId}/insights/users_acquisition', tags=["insights"]) +def get_users_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_acquisition(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_retention', tags=["insights"]) +@app.get('/{projectId}/insights/users_retention', tags=["insights"]) +def get_users_retention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_retention(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_retention', tags=["insights"]) +@app.get('/{projectId}/insights/feature_retention', tags=["insights"]) +def get_feature_retention(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_retention(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_acquisition', tags=["insights"]) 
+@app.get('/{projectId}/insights/feature_acquisition', tags=["insights"]) +def get_feature_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_acquisition(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_popularity_frequency', tags=["insights"]) +@app.get('/{projectId}/insights/feature_popularity_frequency', tags=["insights"]) +def get_feature_popularity_frequency(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_popularity_frequency(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_intensity', tags=["insights"]) +@app.get('/{projectId}/insights/feature_intensity', tags=["insights"]) +def get_feature_intensity(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_intensity(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_adoption', tags=["insights"]) +@app.get('/{projectId}/insights/feature_adoption', tags=["insights"]) +def get_feature_adoption(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_adoption(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/feature_adoption_top_users', tags=["insights"]) +@app.get('/{projectId}/insights/feature_adoption_top_users', tags=["insights"]) +def get_feature_adoption_top_users(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.feature_adoption_top_users(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_active', tags=["insights"]) +@app.get('/{projectId}/insights/users_active', tags=["insights"]) +def get_users_active(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_active(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_power', tags=["insights"]) 
+@app.get('/{projectId}/insights/users_power', tags=["insights"]) +def get_users_power(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_power(project_id=projectId, **data.dict())} + + +@app.post('/{projectId}/insights/users_slipping', tags=["insights"]) +@app.get('/{projectId}/insights/users_slipping', tags=["insights"]) +def get_users_slipping(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): + return {"data": insights.users_slipping(project_id=projectId, **data.dict())} + +# +# +# @app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) +# def get_dashboard_autocomplete(projectId:int, widget): +# params = app.current_request.query_params +# if params is None or params.get('q') is None or len(params.get('q')) == 0: +# return {"data": []} +# params['q'] = '^' + params['q'] +# +# if widget in ['performance']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=True) +# elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', +# 'impacted_sessions_by_slow_pages', 'pages_response_time']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), pages_only=True) +# elif widget in ['resources_loading_time']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=False) +# elif widget in ['time_between_events', 'events']: +# data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, +# platform=params.get('platform', None), performance=False, events_only=True) +# elif widget in ['metadata']: +# data = dashboard.search(params.get('q', ''), None, project_id=projectId, +# platform=params.get('platform', None), metadata=True, key=params.get("key")) +# else: +# return {"errors": 
[f"unsupported widget: {widget}"]} +# return {'data': data} diff --git a/api/run-dev.sh b/api/run-dev.sh new file mode 100755 index 000000000..76682286d --- /dev/null +++ b/api/run-dev.sh @@ -0,0 +1,3 @@ +#!/bin/zsh + +uvicorn app:app --reload \ No newline at end of file diff --git a/api/schemas.py b/api/schemas.py new file mode 100644 index 000000000..7884f27aa --- /dev/null +++ b/api/schemas.py @@ -0,0 +1,379 @@ +from typing import Optional, List, Literal + +from pydantic import BaseModel, Field, EmailStr, HttpUrl + +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils.helper import key_to_camel_case + + +class _Grecaptcha(BaseModel): + g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response') + + +class UserLoginSchema(_Grecaptcha): + email: EmailStr = Field(...) + password: str = Field(...) + + +class UserSignupSchema(UserLoginSchema): + fullname: str = Field(...) + organizationName: str = Field(...) + projectName: str = Field(default="my first project") + + class Config: + alias_generator = key_to_camel_case + + +class EditUserSchema(BaseModel): + name: Optional[str] = Field(None) + email: Optional[str] = Field(None) + admin: Optional[bool] = Field(False) + appearance: Optional[dict] = Field({}) + + +class ForgetPasswordPayloadSchema(_Grecaptcha): + email: str = Field(...) + + +class EditUserPasswordSchema(BaseModel): + old_password: str = Field(...) + new_password: str = Field(...) + + class Config: + alias_generator = key_to_camel_case + + +class UpdateTenantSchema(BaseModel): + name: Optional[str] = Field(None) + opt_out: Optional[bool] = Field(None) + + class Config: + alias_generator = key_to_camel_case + + +class CreateProjectSchema(BaseModel): + name: str = Field("my first project") + + +class CurrentAPIContext(BaseModel): + tenant_id: int = Field(...) + + +class CurrentContext(CurrentAPIContext): + user_id: int = Field(...) + email: str = Field(...) + + +class AddSlackSchema(BaseModel): + name: str = Field(...) 
+ url: HttpUrl = Field(...) + + +class EditSlackSchema(BaseModel): + name: Optional[str] = Field(None) + url: HttpUrl = Field(...) + + +class SearchErrorsSchema(BaseModel): + platform: Optional[str] = Field(None) + startDate: Optional[int] = Field(TimeUTC.now(-7)) + endDate: Optional[int] = Field(TimeUTC.now()) + density: Optional[int] = Field(7) + sort: Optional[str] = Field(None) + order: Optional[str] = Field(None) + + +class EmailNotificationSchema(BaseModel): + notification: str = Field(...) + destination: str = Field(...) + + +class AlertNotificationSchema(BaseModel): + auth: str = Field(...) + notifications: List[EmailNotificationSchema] = Field(...) + + +class CreateNotificationSchema(BaseModel): + token: str = Field(...) + notifications: List = Field(...) + + +class NotificationsViewSchema(BaseModel): + ids: Optional[List] = Field(...) + startTimestamp: int = Field(...) + endTimestamp: int = Field(...) + + +class JiraGithubSchema(BaseModel): + provider: str = Field(...) + username: str = Field(...) + token: str = Field(...) + url: str = Field(...) + + +class CreateEditWebhookSchema(BaseModel): + webhookId: Optional[int] = Field(None) + endpoint: str = Field(...) + authHeader: Optional[str] = Field(None) + name: Optional[str] = Field(...) + + +class CreateMemberSchema(BaseModel): + userId: Optional[int] = Field(None) + name: str = Field(...) + email: str = Field(...) + admin: bool = Field(False) + + +class EditMemberSchema(BaseModel): + name: str = Field(...) + email: str = Field(...) + admin: bool = Field(False) + + +class EditPasswordByInvitationSchema(BaseModel): + invitation: str = Field(...) + passphrase: str = Field(..., alias="pass") + password: str = Field(...) + + +class AssignmentSchema(BaseModel): + assignee: str = Field(...) + description: str = Field(...) + title: str = Field(...) + issue_type: str = Field(...) + + class Config: + alias_generator = key_to_camel_case + + +class CommentAssignmentSchema(BaseModel): + message: str = Field(...) 
+ + +class IntegrationNotificationSchema(BaseModel): + comment: Optional[str] = Field(None) + + +class GdprSchema(BaseModel): + maskEmails: bool = Field(...) + sampleRate: int = Field(...) + maskNumbers: bool = Field(...) + defaultInputMode: str = Field(...) + + +class SampleRateSchema(BaseModel): + rate: int = Field(...) + captureAll: bool = Field(False) + + +class WeeklyReportConfigSchema(BaseModel): + weekly_report: bool = Field(True) + + class Config: + alias_generator = key_to_camel_case + + +class GetHeatmapPayloadSchema(BaseModel): + startDate: int = Field(TimeUTC.now(delta_days=-30)) + endDate: int = Field(TimeUTC.now()) + url: str = Field(...) + + +class DatadogSchema(BaseModel): + apiKey: str = Field(...) + applicationKey: str = Field(...) + + +class StackdriverSchema(BaseModel): + serviceAccountCredentials: str = Field(...) + logName: str = Field(...) + + +class NewrelicSchema(BaseModel): + applicationId: str = Field(...) + xQueryKey: str = Field(...) + region: str = Field(...) + + +class RollbarSchema(BaseModel): + accessToken: str = Field(...) + + +class BugsnagBasicSchema(BaseModel): + authorizationToken: str = Field(...) + + +class BugsnagSchema(BugsnagBasicSchema): + bugsnagProjectId: str = Field(...) + + +class CloudwatchBasicSchema(BaseModel): + awsAccessKeyId: str = Field(...) + awsSecretAccessKey: str = Field(...) + region: str = Field(...) + + +class CloudwatchSchema(CloudwatchBasicSchema): + logGroupName: str = Field(...) + + +class ElasticsearchBasicSchema(BaseModel): + host: str = Field(...) + port: int = Field(...) + apiKeyId: str = Field(...) + apiKey: str = Field(...) + + +class ElasticsearchSchema(ElasticsearchBasicSchema): + indexes: str = Field(...) + + +class SumologicSchema(BaseModel): + accessId: str = Field(...) + accessKey: str = Field(...) + region: str = Field(...) + + +class MetadataBasicSchema(BaseModel): + index: Optional[int] = Field(None) + key: str = Field(...) 
+ + +class MetadataListSchema(BaseModel): + list: List[MetadataBasicSchema] = Field(...) + + +class EmailPayloadSchema(BaseModel): + auth: str = Field(...) + email: EmailStr = Field(...) + link: str = Field(...) + message: str = Field(...) + + +class WeeklyReportPayloadSchema(BaseModel): + auth: str = Field(...) + email: EmailStr = Field(...) + data: dict = Field(...) + + +class MemberInvitationPayloadSchema(BaseModel): + auth: str = Field(...) + email: EmailStr = Field(...) + invitation_link: str = Field(...) + client_id: str = Field(...) + sender_name: str = Field(...) + + class Config: + alias_generator = key_to_camel_case + + +class ErrorIdsPayloadSchema(BaseModel): + errors: List[str] = Field([]) + + +class _AlertMessageSchema(BaseModel): + type: str = Field(...) + value: str = Field(...) + + +class _AlertOptionSchema(BaseModel): + message: List[_AlertMessageSchema] = Field([]) + currentPeriod: int = Field(...) + previousPeriod: int = Field(...) + lastNotification: Optional[int] = Field(None) + renotifyInterval: Optional[int] = Field(720) + + +class _AlertQuerySchema(BaseModel): + left: str = Field(...) + right: float = Field(...) + operator: Literal["<", ">", "<=", ">="] = Field(...) + + +class AlertSchema(BaseModel): + name: str = Field(...) + detectionMethod: str = Field(...) + description: Optional[str] = Field(None) + options: _AlertOptionSchema = Field(...) + query: _AlertQuerySchema = Field(...) + + +class SourcemapUploadPayloadSchema(BaseModel): + urls: List[str] = Field(..., alias="URL") + + +class _SessionSearchEventSchema(BaseModel): + value: Optional[str] = Field(...) + type: str = Field(...) + operator: str = Field(...) + source: Optional[str] = Field(...) + + +class _SessionSearchFilterSchema(_SessionSearchEventSchema): + value: List[str] = Field(...) + + +class SessionsSearchPayloadSchema(BaseModel): + events: List[_SessionSearchEventSchema] = Field([]) + filters: List[_SessionSearchFilterSchema] = Field([]) + # custom:dict=Field(...) 
+ # rangeValue:str=Field(...) + startDate: int = Field(...) + endDate: int = Field(...) + sort: str = Field(...) + order: str = Field(...) + + +class FunnelSearchPayloadSchema(SessionsSearchPayloadSchema): + range_value: Optional[str] = Field(None) + sort: Optional[str] = Field(None) + order: Optional[str] = Field(None) + + class Config: + alias_generator = key_to_camel_case + + +class FunnelSchema(BaseModel): + name: str = Field(...) + filter: FunnelSearchPayloadSchema = Field([]) + is_public: bool = Field(False) + + class Config: + alias_generator = key_to_camel_case + + +class UpdateFunnelSchema(FunnelSchema): + name: Optional[str] = Field(None) + filter: Optional[FunnelSearchPayloadSchema] = Field(None) + is_public: Optional[bool] = Field(None) + + +class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema): + sort: Optional[str] = Field(None) + order: Optional[str] = Field(None) + + +class MetricPayloadSchema(BaseModel): + startTimestamp: int = Field(TimeUTC.now(delta_days=-1)) + endTimestamp: int = Field(TimeUTC.now()) + density: int = Field(7) + filters: List[dict] = Field([]) + type: Optional[str] = Field(None) + + class Config: + alias_generator = key_to_camel_case + + +class AssistSearchPayloadSchema(BaseModel): + filters: List[dict] = Field([]) + + +class SentrySchema(BaseModel): + projectSlug: str = Field(...) + organizationSlug: str = Field(...) + token: str = Field(...) + + +class MobileSignPayloadSchema(BaseModel): + keys: List[str] = Field(...) 
diff --git a/ee/api/.env.default b/ee/api/.env.default new file mode 100644 index 000000000..b78536aa3 --- /dev/null +++ b/ee/api/.env.default @@ -0,0 +1,54 @@ +EMAIL_FROM=OpenReplay +EMAIL_HOST= +EMAIL_PASSWORD= +EMAIL_PORT=587 +EMAIL_SSL_CERT= +EMAIL_SSL_KEY= +EMAIL_USER= +EMAIL_USE_SSL=false +EMAIL_USE_TLS=true +LICENSE_KEY= +S3_HOST= +S3_KEY= +S3_SECRET= +SAML2_MD_URL= +SITE_URL= +alert_ntf=http://127.0.0.1:8000/async/alerts/notifications/%s +announcement_url= +assign_link=http://127.0.0.1:8000/async/email_assignment +async_Token= +captcha_key= +captcha_server= +ch_host= +ch_port= +change_password_link=/reset-password?invitation=%s&&pass=%s +email_basic=http://127.0.0.1:8000/async/basic/%s +email_funnel=http://127.0.0.1:8000/async/funnel/%s +email_plans=http://127.0.0.1:8000/async/plans/%s +email_signup=http://127.0.0.1:8000/async/email_signup/%s +idp_entityId= +idp_sls_url= +idp_sso_url= +idp_x509cert= +invitation_link=/api/users/invitation?token=%s +isEE=true +isFOS=false +js_cache_bucket=sessions-assets +jwt_algorithm=HS512 +jwt_exp_delta_seconds=2592000 +jwt_issuer=openreplay-default-ee +jwt_secret="SET A RANDOM STRING HERE" +peers=http://utilities-openreplay.app.svc.cluster.local:9000/assist/%s/peers +pg_dbname=app +pg_host=127.0.0.1 +pg_password= +pg_port=9202 +pg_user= +put_S3_TTL=20 +sentryURL= +sessions_bucket=mobs +sessions_region=us-east-1 +sourcemaps_bucket=sourcemaps +sourcemaps_reader=http://utilities-openreplay.app.svc.cluster.local:9000/sourcemaps +stage=default-ee +version_number=1.0.0 diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 41d3d640f..70fc40afc 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -210,7 +210,6 @@ Pipfile /chalicelib/core/sessions_favorite_viewed.py /chalicelib/core/sessions_metas.py /chalicelib/core/sessions_mobs.py -/chalicelib/core/sessions.py /chalicelib/core/significance.py /chalicelib/core/slack.py /chalicelib/core/socket_ios.py @@ -235,12 +234,25 @@ Pipfile /chalicelib/utils/smtp.py 
/chalicelib/utils/strings.py /chalicelib/utils/TimeUTC.py -/chalicelib/core/heatmaps.py +/chalicelib/blueprints/app/__init__.py +/routers/app/__init__.py +/routers/crons/__init__.py +/routers/subs/__init__.py +/routers/__init__.py +/chalicelib/core/assist.py +/auth/auth_apikey.py +/auth/auth_jwt.py +/chalicelib/blueprints/subs/bp_insights.py +/build.sh +/routers/core.py +/routers/crons/core_crons.py +/routers/subs/dashboard.py +/db_changes.sql +/Dockerfile.bundle /entrypoint.bundle.sh /entrypoint.sh -/env_handler.py +/chalicelib/core/heatmaps.py +/routers/subs/insights.py +/schemas.py /chalicelib/blueprints/app/v1_api.py -/build.sh -/chalicelib/core/assist.py -/chalicelib/blueprints/app/__init__.py -/Dockerfile.bundle +/routers/app/v1_api.py diff --git a/ee/api/Dockerfile b/ee/api/Dockerfile index 649e1f686..284d752ff 100644 --- a/ee/api/Dockerfile +++ b/ee/api/Dockerfile @@ -1,10 +1,11 @@ -FROM python:3.6-slim +FROM python:3.9.7-slim LABEL Maintainer="Rajesh Rajendran" +LABEL Maintainer="KRAIEM Taha Yassine" RUN apt-get update && apt-get install -y pkg-config libxmlsec1-dev gcc && rm -rf /var/lib/apt/lists/* WORKDIR /work COPY . . 
-RUN pip install -r requirements.txt -t ./vendor --upgrade -RUN pip install chalice==1.22.2 +RUN pip install -r requirements.txt +RUN mv .env.default .env # Add Tini # Startup daemon diff --git a/ee/api/app.py b/ee/api/app.py index e12b64e0b..1c731c3f7 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -1,129 +1,81 @@ -import sentry_sdk -from chalice import Chalice, Response -from sentry_sdk import configure_scope +import queue + +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from fastapi import FastAPI, Request +from fastapi.middleware.cors import CORSMiddleware +from starlette import status +from starlette.responses import StreamingResponse, JSONResponse -from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers -from chalicelib.blueprints import bp_core, bp_core_crons -from chalicelib.blueprints import bp_core_dynamic, bp_core_dynamic_crons -from chalicelib.blueprints import bp_ee, bp_ee_crons, bp_saml -from chalicelib.blueprints.app import v1_api, v1_api_ee -from chalicelib.blueprints.subs import bp_dashboard from chalicelib.utils import helper from chalicelib.utils import pg_client -from chalicelib.utils.helper import environ +from routers import core, core_dynamic, ee, saml +from routers.app import v1_api, v1_api_ee +from routers.crons import core_crons +from routers.crons import core_dynamic_crons +from routers.subs import dashboard -app = Chalice(app_name='parrot') -app.debug = not helper.is_production() or helper.is_local() - -sentry_sdk.init(environ["sentryURL"]) - -# Monkey-patch print for DataDog hack -import sys -import traceback - -old_tb = traceback.print_exception -old_f = sys.stdout -old_e = sys.stderr -OR_SESSION_TOKEN = None - - -class F: - def write(self, x): - if OR_SESSION_TOKEN is not None and x != '\n' and not helper.is_local(): - old_f.write(f"[or_session_token={OR_SESSION_TOKEN}] {x}") - else: - old_f.write(x) - - def flush(self): - pass - - -def tb_print_exception(etype, value, tb, limit=None, 
file=None, chain=True): - if OR_SESSION_TOKEN is not None and not helper.is_local(): - value = type(value)(f"[or_session_token={OR_SESSION_TOKEN}] " + str(value)) - - old_tb(etype, value, tb, limit, file, chain) - - -if helper.is_production(): - traceback.print_exception = tb_print_exception - -sys.stdout = F() -sys.stderr = F() -# ---End Monkey-patch - - -_overrides.chalice_app(app) +app = FastAPI() @app.middleware('http') -def or_middleware(event, get_response): +async def or_middleware(request: Request, call_next): from chalicelib.core import unlock if not unlock.is_valid(): - return Response(body={"errors": ["expired license"]}, status_code=403) - if "{projectid}" in event.path.lower(): - from chalicelib.core import projects - if event.context["authorizer"].get("authorizer_identity") == "api_key" \ - and not projects.is_authorized( - project_id=projects.get_internal_project_id(event.uri_params["projectId"]), - tenant_id=event.context["authorizer"]["tenantId"]) \ - or event.context["authorizer"].get("authorizer_identity", "jwt") == "jwt" \ - and not projects.is_authorized(project_id=event.uri_params["projectId"], - tenant_id=event.context["authorizer"]["tenantId"]): - print("unauthorized project") - pg_client.close() - return Response(body={"errors": ["unauthorized project"]}, status_code=401) - global OR_SESSION_TOKEN - OR_SESSION_TOKEN = app.current_request.headers.get('vnd.openreplay.com.sid', - app.current_request.headers.get('vnd.asayer.io.sid')) - if "authorizer" in event.context and event.context["authorizer"] is None: - print("Deleted user!!") - pg_client.close() - return Response(body={"errors": ["Deleted user"]}, status_code=403) + return JSONResponse(content={"errors": ["expired license"]}, status_code=status.HTTP_403_FORBIDDEN) + global OR_SESSION_TOKEN + OR_SESSION_TOKEN = request.headers.get('vnd.openreplay.com.sid', request.headers.get('vnd.asayer.io.sid')) try: if helper.TRACK_TIME: import time now = int(time.time() * 1000) - response = 
get_response(event) - if response.status_code == 200 and response.body is not None and response.body.get("errors") is not None: - if "not found" in response.body["errors"][0]: - response = Response(status_code=404, body=response.body) - else: - response = Response(status_code=400, body=response.body) - if response.status_code // 100 == 5 and helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): - with configure_scope() as scope: - scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) - scope.set_extra("context", event.context) - sentry_sdk.capture_exception(Exception(response.body)) + response: StreamingResponse = await call_next(request) if helper.TRACK_TIME: print(f"Execution time: {int(time.time() * 1000) - now} ms") except Exception as e: - if helper.allow_sentry() and OR_SESSION_TOKEN is not None and not helper.is_local(): - with configure_scope() as scope: - scope.set_tag('stage', environ["stage"]) - scope.set_tag('openReplaySessionToken', OR_SESSION_TOKEN) - scope.set_extra("context", event.context) - sentry_sdk.capture_exception(e) - response = Response(body={"Code": "InternalServerError", - "Message": "An internal server error occurred [level=Fatal]."}, - status_code=500) + pg_client.close() + raise e pg_client.close() return response -# Open source -app.register_blueprint(bp_authorizers.app) -app.register_blueprint(bp_core.app) -app.register_blueprint(bp_core_crons.app) -app.register_blueprint(bp_core_dynamic.app) -app.register_blueprint(bp_core_dynamic_crons.app) -app.register_blueprint(bp_dashboard.app) -app.register_blueprint(v1_api.app) -app.register_blueprint(v1_api_ee.app) -# Enterprise -app.register_blueprint(bp_ee.app) -app.register_blueprint(bp_ee_crons.app) -app.register_blueprint(bp_saml.app) +origins = [ + "*", +] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) 
+app.include_router(core.public_app) +app.include_router(core.app) +app.include_router(core.app_apikey) +app.include_router(core_dynamic.public_app) +app.include_router(core_dynamic.app) +app.include_router(core_dynamic.app_apikey) +app.include_router(ee.public_app) +app.include_router(ee.app) +app.include_router(ee.app_apikey) +app.include_router(saml.public_app) +app.include_router(saml.app) +app.include_router(saml.app_apikey) +app.include_router(dashboard.app) +# app.include_router(insights.app) +app.include_router(v1_api.app_apikey) +app.include_router(v1_api_ee.app_apikey) + +app.queue_system = queue.Queue() +app.schedule = AsyncIOScheduler() +app.schedule.start() + +for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs: + app.schedule.add_job(id=job["func"].__name__, **job) +from chalicelib.core import traces + +app.schedule.add_job(id="trace_worker",**traces.cron_jobs[0]) + +for job in app.schedule.get_jobs(): + print({"Name": str(job.id), "Run Frequency": str(job.trigger), "Next Run": str(job.next_run_time)}) diff --git a/ee/api/auth/__init__.py b/ee/api/auth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/ee/api/auth/auth_project.py b/ee/api/auth/auth_project.py new file mode 100644 index 000000000..479681cb8 --- /dev/null +++ b/ee/api/auth/auth_project.py @@ -0,0 +1,25 @@ +from fastapi import Request +from starlette import status +from starlette.exceptions import HTTPException + +import schemas +from chalicelib.core import projects +from or_dependencies import OR_context + + +class ProjectAuthorizer: + def __init__(self, project_identifier): + self.project_identifier: str = project_identifier + + async def __call__(self, request: Request) -> None: + if len(request.path_params.keys()) == 0 or request.path_params.get(self.project_identifier) is None: + return + current_user: schemas.CurrentContext = await OR_context(request) + project_identifier = request.path_params[self.project_identifier] + if (self.project_identifier 
== "projectId" \ + and not projects.is_authorized(project_id=project_identifier, tenant_id=current_user.tenant_id)) \ + or (self.project_identifier.lower() == "projectKey" \ + and not projects.is_authorized(project_id=projects.get_internal_project_id(project_identifier), + tenant_id=current_user.tenant_id)): + print("unauthorized project") + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="unauthorized project.") diff --git a/ee/api/chalicelib/_overrides.py b/ee/api/chalicelib/_overrides.py deleted file mode 100644 index 2bf0b6d2a..000000000 --- a/ee/api/chalicelib/_overrides.py +++ /dev/null @@ -1,104 +0,0 @@ -from chalice import Chalice, CORSConfig -from chalicelib.blueprints import bp_authorizers -from chalicelib.core import authorizers - -import sched -import threading -import time -from datetime import datetime -import pytz -from croniter import croniter - -base_time = datetime.now(pytz.utc) - -cors_config = CORSConfig( - allow_origin='*', - allow_headers=['vnd.openreplay.com.sid', 'vnd.asayer.io.sid'], - # max_age=600, - # expose_headers=['X-Special-Header'], - allow_credentials=True -) - - -def chalice_app(app): - def app_route(self, path, **kwargs): - kwargs.setdefault('cors', cors_config) - kwargs.setdefault('authorizer', bp_authorizers.jwt_authorizer) - handler_type = 'route' - name = kwargs.pop('name', None) - registration_kwargs = {'path': path, 'kwargs': kwargs, 'authorizer': kwargs.get("authorizer")} - - def _register_handler(user_handler): - handler_name = name - if handler_name is None: - handler_name = user_handler.__name__ - if registration_kwargs is not None: - kwargs = registration_kwargs - else: - kwargs = {} - - if kwargs['authorizer'] == bp_authorizers.jwt_authorizer \ - or kwargs['authorizer'] == bp_authorizers.api_key_authorizer: - def _user_handler(context=None, **args): - if context is not None: - args['context'] = context - else: - authorizer_context = app.current_request.context['authorizer'] - if 
kwargs['authorizer'] == bp_authorizers.jwt_authorizer: - args['context'] = authorizers.jwt_context(authorizer_context) - else: - args['context'] = authorizer_context - return user_handler(**args) - - wrapped = self._wrap_handler(handler_type, handler_name, _user_handler) - self._register_handler(handler_type, handler_name, _user_handler, wrapped, kwargs) - else: - wrapped = self._wrap_handler(handler_type, handler_name, user_handler) - self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs) - return wrapped - - return _register_handler - - app.route = app_route.__get__(app, Chalice) - - def app_schedule(self, expression, name=None, description=''): - handler_type = 'schedule' - registration_kwargs = {'expression': expression, - 'description': description} - - def _register_handler(user_handler): - handler_name = name - if handler_name is None: - handler_name = user_handler.__name__ - kwargs = registration_kwargs - cron_expression = kwargs["expression"].to_string()[len("cron("):-1] - if len(cron_expression.split(" ")) > 5: - cron_expression = " ".join(cron_expression.split(" ")[:-1]) - cron_expression = cron_expression.replace("?", "*") - cron_shell(user_handler, cron_expression) - - wrapped = self._wrap_handler(handler_type, handler_name, user_handler) - self._register_handler(handler_type, handler_name, user_handler, wrapped, kwargs) - return wrapped - - return _register_handler - - app.schedule = app_schedule.__get__(app, Chalice) - - def spawn(function, args): - th = threading.Thread(target=function, kwargs=args) - th.setDaemon(True) - th.start() - - def cron_shell(function, cron_expression): - def to_start(): - scheduler = sched.scheduler(time.time, time.sleep) - citer = croniter(cron_expression, base_time) - while True: - next_execution = citer.get_next(datetime) - print(f"{function.__name__} next execution: {next_execution}") - scheduler.enterabs(next_execution.timestamp(), 1, function, argument=(None,)) - scheduler.run() - 
print(f"{function.__name__} executed: {next_execution}") - - spawn(to_start, None) diff --git a/ee/api/chalicelib/blueprints/app/v1_api_ee.py b/ee/api/chalicelib/blueprints/app/v1_api_ee.py deleted file mode 100644 index 5682bf5b2..000000000 --- a/ee/api/chalicelib/blueprints/app/v1_api_ee.py +++ /dev/null @@ -1,16 +0,0 @@ -from chalice import Blueprint - -from chalicelib import _overrides -from chalicelib.blueprints import bp_authorizers -from chalicelib.utils import assist_helper - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/v1/assist/credentials', methods=['GET'], authorizer=bp_authorizers.api_key_authorizer) -def get_assist_credentials(context): - credentials = assist_helper.get_temporary_credentials() - if "errors" in credentials: - return credentials - return {"data": credentials} diff --git a/ee/api/chalicelib/blueprints/bp_authorizers.py b/ee/api/chalicelib/blueprints/bp_authorizers.py deleted file mode 100644 index 14abd3988..000000000 --- a/ee/api/chalicelib/blueprints/bp_authorizers.py +++ /dev/null @@ -1,38 +0,0 @@ -from chalice import Blueprint, AuthResponse -from chalicelib.utils import helper -from chalicelib.core import authorizers - -from chalicelib.core import users - -app = Blueprint(__name__) - - -@app.authorizer() -def api_key_authorizer(auth_request): - r = authorizers.api_key_authorizer(auth_request.token) - if r is None: - return AuthResponse(routes=[], principal_id=None) - r["authorizer_identity"] = "api_key" - print(r) - return AuthResponse( - routes=['*'], - principal_id=r['tenantId'], - context=r - ) - - -@app.authorizer(ttl_seconds=60) -def jwt_authorizer(auth_request): - jwt_payload = authorizers.jwt_authorizer(auth_request.token) - if jwt_payload is None \ - or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ - or not users.auth_exists(user_id=jwt_payload["userId"], tenant_id=jwt_payload["tenantId"], - jwt_iat=jwt_payload["iat"], jwt_aud=jwt_payload["aud"]): - return 
AuthResponse(routes=[], principal_id=None) - jwt_payload["authorizer_identity"] = "jwt" - print(jwt_payload) - return AuthResponse( - routes=['*'], - principal_id=jwt_payload['userId'], - context=jwt_payload - ) diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic.py b/ee/api/chalicelib/blueprints/bp_core_dynamic.py deleted file mode 100644 index 6be1380f5..000000000 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic.py +++ /dev/null @@ -1,470 +0,0 @@ -from chalice import Blueprint, Response - -from chalicelib import _overrides -from chalicelib.core import assist -from chalicelib.core import boarding -from chalicelib.core import errors -from chalicelib.core import license -from chalicelib.core import metadata, errors_favorite_viewed, slack, alerts, sessions, integrations_manager -from chalicelib.core import notifications -from chalicelib.core import projects -from chalicelib.core import signup -from chalicelib.core import tenants -from chalicelib.core import users -from chalicelib.core import webhook -from chalicelib.core.collaboration_slack import Slack -from chalicelib.utils import captcha, SAML2_helper -from chalicelib.utils import helper -from chalicelib.utils.helper import environ - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/login', methods=['POST'], authorizer=None) -def login(): - data = app.current_request.json_body - if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): - return {"errors": ["Invalid captcha."]} - r = users.authenticate(data['email'], data['password'], for_plugin=False) - if r is None: - return Response(status_code=401, body={ - 'errors': ['You’ve entered invalid Email or Password.'] - }) - elif "errors" in r: - return r - - tenant_id = r.pop("tenantId") - # change this in open-source - r = {**r, - "limits": { - "teamMember": int(environ.get("numberOfSeats", 0)), - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(tenant_id)}, - 
**license.get_status(tenant_id), - "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0, - "saml2": SAML2_helper.is_saml2_available(), - "iceServers": assist.get_ice_servers() - } - c = tenants.get_by_tenant_id(tenant_id) - c.pop("createdAt") - c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, - stack_integrations=True, version=True) - return { - 'jwt': r.pop('jwt'), - 'data': { - "user": r, - "client": c - } - } - - -@app.route('/account', methods=['GET']) -def get_account(context): - r = users.get(tenant_id=context['tenantId'], user_id=context['userId']) - return { - 'data': { - **r, - "limits": { - "teamMember": int(environ.get("numberOfSeats", 0)), - "projects": -1, - "metadata": metadata.get_remaining_metadata_with_count(context['tenantId']) - }, - **license.get_status(context["tenantId"]), - "smtp": environ["EMAIL_HOST"] is not None and len(environ["EMAIL_HOST"]) > 0, - "saml2": SAML2_helper.is_saml2_available(), - "iceServers": assist.get_ice_servers() - } - } - - -@app.route('/projects', methods=['GET']) -def get_projects(context): - return {"data": projects.get_projects(tenant_id=context["tenantId"], recording_state=True, gdpr=True, recorded=True, - stack_integrations=True, version=True)} - - -@app.route('/projects', methods=['POST', 'PUT']) -def create_project(context): - data = app.current_request.json_body - return projects.create(tenant_id=context["tenantId"], user_id=context["userId"], data=data) - - -@app.route('/projects/{projectId}', methods=['POST', 'PUT']) -def create_edit_project(projectId, context): - data = app.current_request.json_body - - return projects.edit(tenant_id=context["tenantId"], user_id=context["userId"], data=data, project_id=projectId) - - -@app.route('/projects/{projectId}', methods=['GET']) -def get_project(projectId, context): - data = projects.get_project(tenant_id=context["tenantId"], project_id=projectId, include_last_session=True, - include_gdpr=True) - 
if data is None: - return {"errors": ["project not found"]} - return {"data": data} - - -@app.route('/projects/{projectId}', methods=['DELETE']) -def delete_project(projectId, context): - return projects.delete(tenant_id=context["tenantId"], user_id=context["userId"], project_id=projectId) - - -@app.route('/projects/limit', methods=['GET']) -def get_projects_limit(context): - return {"data": { - "current": projects.count_by_tenant(tenant_id=context["tenantId"]), - "remaining": -1 # change this in open-source - }} - - -@app.route('/client', methods=['GET']) -def get_client(context): - r = tenants.get_by_tenant_id(context['tenantId']) - if r is not None: - r.pop("createdAt") - r["projects"] = projects.get_projects(tenant_id=context['tenantId'], recording_state=True, recorded=True, - stack_integrations=True, version=True) - return { - 'data': r - } - - -@app.route('/client/new_api_key', methods=['GET']) -def generate_new_tenant_token(context): - return { - 'data': tenants.generate_new_api_key(context['tenantId']) - } - - -@app.route('/client', methods=['PUT', 'POST']) -def put_client(context): - data = app.current_request.json_body - return tenants.update(tenant_id=context["tenantId"], user_id=context["userId"], data=data) - - -@app.route('/signup', methods=['GET'], authorizer=None) -def get_all_signup(): - return {"data": {"tenants": tenants.tenants_exists(), - "sso": SAML2_helper.is_saml2_available(), - "ssoProvider": SAML2_helper.get_saml2_provider(), - "edition": helper.get_edition()}} - - -@app.route('/signup', methods=['POST', 'PUT'], authorizer=None) -def signup_handler(): - data = app.current_request.json_body - return signup.create_step1(data) - - -@app.route('/integrations/slack', methods=['POST', 'PUT']) -def add_slack_client(context): - data = app.current_request.json_body - if "url" not in data or "name" not in data: - return {"errors": ["please provide a url and a name"]} - n = Slack.add_channel(tenant_id=context["tenantId"], url=data["url"], 
name=data["name"]) - if n is None: - return { - "errors": ["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] - } - return {"data": n} - - -@app.route('/integrations/slack/{integrationId}', methods=['POST', 'PUT']) -def edit_slack_integration(integrationId, context): - data = app.current_request.json_body - if data.get("url") and len(data["url"]) > 0: - old = webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId) - if old["endpoint"] != data["url"]: - if not Slack.say_hello(data["url"]): - return { - "errors": [ - "We couldn't send you a test message on your Slack channel. Please verify your webhook url."] - } - return {"data": webhook.update(tenant_id=context["tenantId"], webhook_id=integrationId, - changes={"name": data.get("name", ""), "endpoint": data["url"]})} - - -@app.route('/{projectId}/errors/search', methods=['POST']) -def errors_search(projectId, context): - data = app.current_request.json_body - params = app.current_request.query_params - if params is None: - params = {} - - return errors.search(data, projectId, user_id=context["userId"], status=params.get("status", "ALL"), - favorite_only="favorite" in params) - - -@app.route('/{projectId}/errors/stats', methods=['GET']) -def errors_stats(projectId, context): - params = app.current_request.query_params - if params is None: - params = {} - - return errors.stats(projectId, user_id=context["userId"], **params) - - -@app.route('/{projectId}/errors/{errorId}', methods=['GET']) -def errors_get_details(projectId, errorId, context): - params = app.current_request.query_params - if params is None: - params = {} - - data = errors.get_details(project_id=projectId, user_id=context["userId"], error_id=errorId, **params) - if data.get("data") is not None: - errors_favorite_viewed.viewed_error(project_id=projectId, user_id=context['userId'], error_id=errorId) - return data - - -@app.route('/{projectId}/errors/{errorId}/stats', methods=['GET']) -def 
errors_get_details_right_column(projectId, errorId, context): - params = app.current_request.query_params - if params is None: - params = {} - - data = errors.get_details_chart(project_id=projectId, user_id=context["userId"], error_id=errorId, **params) - return data - - -@app.route('/{projectId}/errors/{errorId}/sourcemaps', methods=['GET']) -def errors_get_details_sourcemaps(projectId, errorId, context): - data = errors.get_trace(project_id=projectId, error_id=errorId) - if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/async/alerts/notifications/{step}', methods=['POST', 'PUT'], authorizer=None) -def send_alerts_notification_async(step): - data = app.current_request.json_body - if data.pop("auth") != environ["async_Token"]: - return {"errors": ["missing auth"]} - if step == "slack": - slack.send_batch(notifications_list=data.get("notifications")) - elif step == "email": - alerts.send_by_email_batch(notifications_list=data.get("notifications")) - elif step == "webhook": - webhook.trigger_batch(data_list=data.get("notifications")) - - -@app.route('/notifications', methods=['GET']) -def get_notifications(context): - return {"data": notifications.get_all(tenant_id=context['tenantId'], user_id=context['userId'])} - - -@app.route('/notifications/{notificationId}/view', methods=['GET']) -def view_notifications(notificationId, context): - return {"data": notifications.view_notification(notification_ids=[notificationId], user_id=context['userId'])} - - -@app.route('/notifications/view', methods=['POST', 'PUT']) -def batch_view_notifications(context): - data = app.current_request.json_body - return {"data": notifications.view_notification(notification_ids=data.get("ids", []), - startTimestamp=data.get("startTimestamp"), - endTimestamp=data.get("endTimestamp"), - user_id=context['userId'], - tenant_id=context["tenantId"])} - - -@app.route('/notifications', methods=['POST', 'PUT'], authorizer=None) -def create_notifications(): - data = 
app.current_request.json_body - if data.get("token", "") != "nF46JdQqAM5v9KI9lPMpcu8o9xiJGvNNWOGL7TJP": - return {"errors": ["missing token"]} - return notifications.create(data.get("notifications", [])) - - -@app.route('/boarding', methods=['GET']) -def get_boarding_state(context): - return {"data": boarding.get_state(tenant_id=context["tenantId"])} - - -@app.route('/boarding/installing', methods=['GET']) -def get_boarding_state_installing(context): - return {"data": boarding.get_state_installing(tenant_id=context["tenantId"])} - - -@app.route('/boarding/identify-users', methods=['GET']) -def get_boarding_state_identify_users(context): - return {"data": boarding.get_state_identify_users(tenant_id=context["tenantId"])} - - -@app.route('/boarding/manage-users', methods=['GET']) -def get_boarding_state_manage_users(context): - return {"data": boarding.get_state_manage_users(tenant_id=context["tenantId"])} - - -@app.route('/boarding/integrations', methods=['GET']) -def get_boarding_state_integrations(context): - return {"data": boarding.get_state_integrations(tenant_id=context["tenantId"])} - - -# this endpoint supports both jira & github based on `provider` attribute -@app.route('/integrations/issues', methods=['POST', 'PUT']) -def add_edit_jira_cloud_github(context): - data = app.current_request.json_body - provider = data.get("provider", "").upper() - error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context["tenantId"], - user_id=context["userId"]) - if error is not None: - return error - return {"data": integration.add_edit(data=data)} - - -@app.route('/integrations/slack/{integrationId}', methods=['GET']) -def get_slack_webhook(integrationId, context): - return {"data": webhook.get(tenant_id=context["tenantId"], webhook_id=integrationId)} - - -@app.route('/integrations/slack/channels', methods=['GET']) -def get_slack_integration(context): - return {"data": webhook.get_by_type(tenant_id=context["tenantId"], webhook_type='slack')} 
- - -@app.route('/integrations/slack/{integrationId}', methods=['DELETE']) -def delete_slack_integration(integrationId, context): - return webhook.delete(context["tenantId"], integrationId) - - -@app.route('/webhooks', methods=['POST', 'PUT']) -def add_edit_webhook(context): - data = app.current_request.json_body - return {"data": webhook.add_edit(tenant_id=context["tenantId"], data=data, replace_none=True)} - - -@app.route('/webhooks', methods=['GET']) -def get_webhooks(context): - return {"data": webhook.get_by_tenant(tenant_id=context["tenantId"], replace_none=True)} - - -@app.route('/webhooks/{webhookId}', methods=['DELETE']) -def delete_webhook(webhookId, context): - return {"data": webhook.delete(tenant_id=context["tenantId"], webhook_id=webhookId)} - - -@app.route('/client/members', methods=['GET']) -def get_members(context): - return {"data": users.get_members(tenant_id=context['tenantId'])} - - -@app.route('/client/members', methods=['PUT', 'POST']) -def add_member(context): - # if SAML2_helper.is_saml2_available(): - # return {"errors": ["please use your SSO server to add teammates"]} - data = app.current_request.json_body - return users.create_member(tenant_id=context['tenantId'], user_id=context['userId'], data=data) - - -@app.route('/users/invitation', methods=['GET'], authorizer=None) -def process_invitation_link(): - params = app.current_request.query_params - if params is None or len(params.get("token", "")) < 64: - return {"errors": ["please provide a valid invitation"]} - user = users.get_by_invitation_token(params["token"]) - if user is None: - return {"errors": ["invitation not found"]} - if user["expiredInvitation"]: - return {"errors": ["expired invitation, please ask your admin to send a new one"]} - if user["expiredChange"] is not None and not user["expiredChange"] \ - and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: - pass_token = user["changePwdToken"] - else: - pass_token = 
users.allow_password_change(user_id=user["userId"]) - return Response( - status_code=307, - body='', - headers={'Location': environ["SITE_URL"] + environ["change_password_link"] % (params["token"], pass_token), - 'Content-Type': 'text/plain'}) - - -@app.route('/password/reset', methods=['POST', 'PUT'], authorizer=None) -def change_password_by_invitation(): - data = app.current_request.json_body - if data is None or len(data.get("invitation", "")) < 64 or len(data.get("pass", "")) < 8: - return {"errors": ["please provide a valid invitation & pass"]} - user = users.get_by_invitation_token(token=data["invitation"], pass_token=data["pass"]) - if user is None: - return {"errors": ["invitation not found"]} - if user["expiredChange"]: - return {"errors": ["expired change, please re-use the invitation link"]} - - return users.set_password_invitation(new_password=data["password"], user_id=user["userId"], - tenant_id=user["tenantId"]) - - -@app.route('/client/members/{memberId}', methods=['PUT', 'POST']) -def edit_member(memberId, context): - data = app.current_request.json_body - return users.edit(tenant_id=context['tenantId'], editor_id=context['userId'], changes=data, - user_id_to_update=memberId) - - -@app.route('/client/members/{memberId}/reset', methods=['GET']) -def reset_reinvite_member(memberId, context): - return users.reset_member(tenant_id=context['tenantId'], editor_id=context['userId'], user_id_to_update=memberId) - - -@app.route('/client/members/{memberId}', methods=['DELETE']) -def delete_member(memberId, context): - return users.delete_member(tenant_id=context["tenantId"], user_id=context['userId'], id_to_delete=memberId) - - -@app.route('/account/new_api_key', methods=['GET']) -def generate_new_user_token(context): - return {"data": users.generate_new_api_key(user_id=context['userId'])} - - -@app.route('/account', methods=['POST', 'PUT']) -def edit_account(context): - data = app.current_request.json_body - return users.edit(tenant_id=context['tenantId'], 
user_id_to_update=context['userId'], changes=data, - editor_id=context['userId']) - - -@app.route('/account/password', methods=['PUT', 'POST']) -def change_client_password(context): - data = app.current_request.json_body - return users.change_password(email=context['email'], old_password=data["oldPassword"], - new_password=data["newPassword"], tenant_id=context["tenantId"], - user_id=context["userId"]) - - -@app.route('/metadata/session_search', methods=['GET']) -def search_sessions_by_metadata(context): - params = app.current_request.query_params - if params is None: - return {"errors": ["please provide a key&value for search"]} - value = params.get('value', '') - key = params.get('key', '') - project_id = params.get('projectId') - if project_id is not None \ - and not projects.is_authorized(project_id=project_id, tenant_id=context["tenantId"]): - return {"errors": ["unauthorized project"]} - if len(value) == 0 and len(key) == 0: - return {"errors": ["please provide a key&value for search"]} - if len(value) == 0: - return {"errors": ["please provide a value for search"]} - if len(key) == 0: - return {"errors": ["please provide a key for search"]} - return { - "data": sessions.search_by_metadata(tenant_id=context["tenantId"], user_id=context["userId"], m_value=value, - m_key=key, - project_id=project_id)} - - -@app.route('/plans', methods=['GET']) -def get_current_plan(context): - return { - "data": license.get_status(context["tenantId"]) - } - - -@app.route('/alerts/notifications', methods=['POST', 'PUT'], authorizer=None) -def send_alerts_notifications(): - data = app.current_request.json_body - return {"data": alerts.process_notifications(data.get("notifications", []))} diff --git a/ee/api/chalicelib/blueprints/bp_core_dynamic_crons.py b/ee/api/chalicelib/blueprints/bp_core_dynamic_crons.py deleted file mode 100644 index b149c8807..000000000 --- a/ee/api/chalicelib/blueprints/bp_core_dynamic_crons.py +++ /dev/null @@ -1,21 +0,0 @@ -from chalice import Blueprint, 
Cron -from chalicelib import _overrides -from chalicelib.utils import helper - -app = Blueprint(__name__) -_overrides.chalice_app(app) -from chalicelib.core import telemetry -from chalicelib.core import unlock - - -# Run every day. -@app.schedule(Cron('0', '0', '?', '*', '*', '*')) -def telemetry_cron(event): - telemetry.compute() - - -@app.schedule(Cron('0/60', '*', '*', '*', '?', '*')) -def unlock_cron(event): - print("validating license") - unlock.check() - print(f"valid: {unlock.is_valid()}") diff --git a/ee/api/chalicelib/blueprints/bp_ee.py b/ee/api/chalicelib/blueprints/bp_ee.py deleted file mode 100644 index c71668e36..000000000 --- a/ee/api/chalicelib/blueprints/bp_ee.py +++ /dev/null @@ -1,58 +0,0 @@ -from chalice import Blueprint - -from chalicelib import _overrides -from chalicelib.core import roles -from chalicelib.core import unlock -from chalicelib.utils import assist_helper - -app = Blueprint(__name__) -_overrides.chalice_app(app) - -unlock.check() - - -@app.route('/client/roles', methods=['GET']) -def get_roles(context): - return { - 'data': roles.get_roles(tenant_id=context["tenantId"]) - } - - -@app.route('/client/roles', methods=['POST', 'PUT']) -def add_role(context): - data = app.current_request.json_body - data = roles.create(tenant_id=context['tenantId'], user_id=context['userId'], name=data["name"], - description=data.get("description"), permissions=data["permissions"]) - if "errors" in data: - return data - - return { - 'data': data - } - - -@app.route('/client/roles/{roleId}', methods=['POST', 'PUT']) -def edit_role(roleId, context): - data = app.current_request.json_body - data = roles.update(tenant_id=context['tenantId'], user_id=context['userId'], role_id=roleId, changes=data) - if "errors" in data: - return data - - return { - 'data': data - } - - -@app.route('/client/roles/{roleId}', methods=['DELETE']) -def delete_role(roleId, context): - data = roles.delete(tenant_id=context['tenantId'], user_id=context["userId"], role_id=roleId) - 
if "errors" in data: - return data - return { - 'data': data - } - - -@app.route('/assist/credentials', methods=['GET']) -def get_assist_credentials(context): - return {"data": assist_helper.get_full_config()} diff --git a/ee/api/chalicelib/blueprints/bp_ee_crons.py b/ee/api/chalicelib/blueprints/bp_ee_crons.py deleted file mode 100644 index 3333fbb20..000000000 --- a/ee/api/chalicelib/blueprints/bp_ee_crons.py +++ /dev/null @@ -1,6 +0,0 @@ -from chalice import Blueprint -from chalice import Cron -from chalicelib import _overrides - -app = Blueprint(__name__) -_overrides.chalice_app(app) \ No newline at end of file diff --git a/ee/api/chalicelib/blueprints/subs/bp_dashboard.py b/ee/api/chalicelib/blueprints/subs/bp_dashboard.py deleted file mode 100644 index b868f7c64..000000000 --- a/ee/api/chalicelib/blueprints/subs/bp_dashboard.py +++ /dev/null @@ -1,606 +0,0 @@ -from chalice import Blueprint -from chalicelib.utils import helper -from chalicelib import _overrides - -from chalicelib.core import dashboard - -from chalicelib.core import metadata - -app = Blueprint(__name__) -_overrides.chalice_app(app) - - -@app.route('/{projectId}/dashboard/metadata', methods=['GET']) -def get_metadata_map(projectId, context): - metamap = [] - for m in metadata.get(project_id=projectId): - metamap.append({"name": m["key"], "key": f"metadata{m['index']}"}) - return {"data": metamap} - - -@app.route('/{projectId}/dashboard/sessions', methods=['GET', 'POST']) -def get_dashboard_processed_sessions(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/errors', methods=['GET', 'POST']) -def get_dashboard_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = 
app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/errors_trend', methods=['GET', 'POST']) -def get_dashboard_errors_trend(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/application_activity', methods=['GET', 'POST']) -def get_dashboard_application_activity(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_application_activity(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/page_metrics', methods=['GET', 'POST']) -def get_dashboard_page_metrics(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_page_metrics(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/user_activity', methods=['GET', 'POST']) -def get_dashboard_user_activity(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_user_activity(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/performance', methods=['GET', 'POST']) -def get_dashboard_performance(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return 
{"data": dashboard.get_performance(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/slowest_images', methods=['GET', 'POST']) -def get_dashboard_slowest_images(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/missing_resources', methods=['GET', 'POST']) -def get_performance_sessions(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/network', methods=['GET', 'POST']) -def get_network_widget(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_network(project_id=projectId, **{**data, **args})} - - -@app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) -def get_dashboard_autocomplete(projectId, widget, context): - params = app.current_request.query_params - if params is None: - return {"data": []} - - if widget in ['performance']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=True) - elif widget in ['pages', 'pages_dom_buildtime', 'top_metrics', 'time_to_render', - 'impacted_sessions_by_slow_pages', 'pages_response_time']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), pages_only=True) - elif widget in ['resources_loading_time']: - data = 
dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=False) - elif widget in ['time_between_events', 'events']: - data = dashboard.search(params.get('q', ''), params.get('type', ''), project_id=projectId, - platform=params.get('platform', None), performance=False, events_only=True) - elif widget in ['metadata']: - data = dashboard.search(params.get('q', ''), None, project_id=projectId, - platform=params.get('platform', None), metadata=True, key=params.get("key")) - else: - return {"errors": [f"unsupported widget: {widget}"]} - return {'data': data} - - -# 1 -@app.route('/{projectId}/dashboard/slowest_resources', methods=['GET', 'POST']) -def get_dashboard_slowest_resources(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_resources(project_id=projectId, **{**data, **args})} - - -# 2 -@app.route('/{projectId}/dashboard/resources_loading_time', methods=['GET', 'POST']) -def get_dashboard_resources(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})} - - -# 3 -@app.route('/{projectId}/dashboard/pages_dom_buildtime', methods=['GET', 'POST']) -def get_dashboard_pages_dom(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args})} - - -# 4 -@app.route('/{projectId}/dashboard/busiest_time_of_day', methods=['GET', 'POST']) -def get_dashboard_busiest_time_of_day(projectId, context): - data = 
app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_busiest_time_of_day(project_id=projectId, **{**data, **args})} - - -# 5 -@app.route('/{projectId}/dashboard/sessions_location', methods=['GET', 'POST']) -def get_dashboard_sessions_location(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_sessions_location(project_id=projectId, **{**data, **args})} - - -# 6 -@app.route('/{projectId}/dashboard/speed_location', methods=['GET', 'POST']) -def get_dashboard_speed_location(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_speed_index_location(project_id=projectId, **{**data, **args})} - - -# 7 -@app.route('/{projectId}/dashboard/pages_response_time', methods=['GET', 'POST']) -def get_dashboard_pages_response_time(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_response_time(project_id=projectId, **{**data, **args})} - - -# 8 -@app.route('/{projectId}/dashboard/pages_response_time_distribution', methods=['GET', 'POST']) -def get_dashboard_pages_response_time_distribution(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_pages_response_time_distribution(project_id=projectId, **{**data, **args})} - - -# 9 -@app.route('/{projectId}/dashboard/top_metrics', methods=['GET', 'POST']) -def 
get_dashboard_top_metrics(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_top_metrics(project_id=projectId, **{**data, **args})} - - -# 10 -@app.route('/{projectId}/dashboard/time_to_render', methods=['GET', 'POST']) -def get_dashboard_time_to_render(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_time_to_render(project_id=projectId, **{**data, **args})} - - -# 11 -@app.route('/{projectId}/dashboard/impacted_sessions_by_slow_pages', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_slow_pages(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_impacted_sessions_by_slow_pages(project_id=projectId, **{**data, **args})} - - -# 12 -@app.route('/{projectId}/dashboard/memory_consumption', methods=['GET', 'POST']) -def get_dashboard_memory_consumption(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})} - - -# 12.1 -@app.route('/{projectId}/dashboard/fps', methods=['GET', 'POST']) -def get_dashboard_avg_fps(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_avg_fps(project_id=projectId, **{**data, **args})} - - -# 12.2 -@app.route('/{projectId}/dashboard/cpu', methods=['GET', 'POST']) -def 
get_dashboard_avg_cpu(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})} - - -# 13 -@app.route('/{projectId}/dashboard/crashes', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_slow_pages(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_crashes(project_id=projectId, **{**data, **args})} - - -# 14 -@app.route('/{projectId}/dashboard/domains_errors', methods=['GET', 'POST']) -def get_dashboard_domains_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})} - - -# 14.1 -@app.route('/{projectId}/dashboard/domains_errors_4xx', methods=['GET', 'POST']) -def get_dashboard_domains_errors_4xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors_4xx(project_id=projectId, **{**data, **args})} - - -# 14.2 -@app.route('/{projectId}/dashboard/domains_errors_5xx', methods=['GET', 'POST']) -def get_dashboard_domains_errors_5xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_domains_errors_5xx(project_id=projectId, **{**data, **args})} - - -# 15 -@app.route('/{projectId}/dashboard/slowest_domains', methods=['GET', 'POST']) -def 
get_dashboard_slowest_domains(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_slowest_domains(project_id=projectId, **{**data, **args})} - - -# 16 -@app.route('/{projectId}/dashboard/errors_per_domains', methods=['GET', 'POST']) -def get_dashboard_errors_per_domains(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})} - - -# 17 -@app.route('/{projectId}/dashboard/sessions_per_browser', methods=['GET', 'POST']) -def get_dashboard_sessions_per_browser(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_sessions_per_browser(project_id=projectId, **{**data, **args})} - - -# 18 -@app.route('/{projectId}/dashboard/calls_errors', methods=['GET', 'POST']) -def get_dashboard_calls_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})} - - -# 18.1 -@app.route('/{projectId}/dashboard/calls_errors_4xx', methods=['GET', 'POST']) -def get_dashboard_calls_errors_4xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors_4xx(project_id=projectId, **{**data, **args})} - - -# 18.2 -@app.route('/{projectId}/dashboard/calls_errors_5xx', methods=['GET', 'POST']) -def 
get_dashboard_calls_errors_5xx(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_calls_errors_5xx(project_id=projectId, **{**data, **args})} - - -# 19 -@app.route('/{projectId}/dashboard/errors_per_type', methods=['GET', 'POST']) -def get_dashboard_errors_per_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})} - - -# 20 -@app.route('/{projectId}/dashboard/resources_by_party', methods=['GET', 'POST']) -def get_dashboard_resources_by_party(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})} - - -# 21 -@app.route('/{projectId}/dashboard/resource_type_vs_response_end', methods=['GET', 'POST']) -def get_dashboard_errors_per_resource_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})} - - -# 22 -@app.route('/{projectId}/dashboard/resources_vs_visually_complete', methods=['GET', 'POST']) -def get_dashboard_resources_vs_visually_complete(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})} - - -# 23 
-@app.route('/{projectId}/dashboard/impacted_sessions_by_js_errors', methods=['GET', 'POST']) -def get_dashboard_impacted_sessions_by_js_errors(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})} - - -# 24 -@app.route('/{projectId}/dashboard/resources_count_by_type', methods=['GET', 'POST']) -def get_dashboard_resources_count_by_type(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})} - - -# 25 -@app.route('/{projectId}/dashboard/time_between_events', methods=['GET']) -def get_dashboard_resources_count_by_type(projectId, context): - return {"errors": ["please choose 2 events"]} - - -@app.route('/{projectId}/dashboard/overview', methods=['GET', 'POST']) -def get_dashboard_group(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": [ - *helper.explode_widget(key="count_sessions", - data=dashboard.get_processed_sessions(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data={**dashboard.get_application_activity(project_id=projectId, **{**data, **args}), - "chart": dashboard.get_performance(project_id=projectId, **{**data, **args}) - .get("chart", [])}), - *helper.explode_widget(data=dashboard.get_page_metrics(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_user_activity(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_pages_dom_build_time(project_id=projectId, **{**data, **args}), 
- key="avg_pages_dom_buildtime"), - *helper.explode_widget(data=dashboard.get_pages_response_time(project_id=projectId, **{**data, **args}), - key="avg_pages_response_time"), - *helper.explode_widget(dashboard.get_top_metrics(project_id=projectId, **{**data, **args})), - *helper.explode_widget(data=dashboard.get_time_to_render(project_id=projectId, **{**data, **args}), - key="avg_time_to_render"), - *helper.explode_widget(dashboard.get_memory_consumption(project_id=projectId, **{**data, **args})), - *helper.explode_widget(dashboard.get_avg_cpu(project_id=projectId, **{**data, **args})), - *helper.explode_widget(dashboard.get_avg_fps(project_id=projectId, **{**data, **args})), - ]} - - -@app.route('/{projectId}/dashboard/errors_crashes', methods=['GET', 'POST']) -def get_dashboard_group(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": [ - {"key": "errors", - "data": dashboard.get_errors(project_id=projectId, **{**data, **args})}, - {"key": "errors_trend", - "data": dashboard.get_errors_trend(project_id=projectId, **{**data, **args})}, - {"key": "crashes", - "data": dashboard.get_crashes(project_id=projectId, **{**data, **args})}, - {"key": "domains_errors", - "data": dashboard.get_domains_errors(project_id=projectId, **{**data, **args})}, - {"key": "errors_per_domains", - "data": dashboard.get_errors_per_domains(project_id=projectId, **{**data, **args})}, - {"key": "calls_errors", - "data": dashboard.get_calls_errors(project_id=projectId, **{**data, **args})}, - {"key": "errors_per_type", - "data": dashboard.get_errors_per_type(project_id=projectId, **{**data, **args})}, - {"key": "impacted_sessions_by_js_errors", - "data": dashboard.get_impacted_sessions_by_js_errors(project_id=projectId, **{**data, **args})} - ]} - - -@app.route('/{projectId}/dashboard/resources', methods=['GET', 'POST']) -def 
get_dashboard_group(projectId, context): - data = app.current_request.json_body - if data is None: - data = {} - params = app.current_request.query_params - args = dashboard.dashboard_args(params) - - return {"data": [ - {"key": "slowest_images", - "data": dashboard.get_slowest_images(project_id=projectId, **{**data, **args})}, - {"key": "missing_resources", - "data": dashboard.get_missing_resources_trend(project_id=projectId, **{**data, **args})}, - {"key": "slowest_resources", - "data": dashboard.get_slowest_resources(project_id=projectId, type='all', **{**data, **args})}, - {"key": "resources_loading_time", - "data": dashboard.get_resources_loading_time(project_id=projectId, **{**data, **args})}, - {"key": "resources_by_party", - "data": dashboard.get_resources_by_party(project_id=projectId, **{**data, **args})}, - {"key": "resource_type_vs_response_end", - "data": dashboard.resource_type_vs_response_end(project_id=projectId, **{**data, **args})}, - {"key": "resources_vs_visually_complete", - "data": dashboard.get_resources_vs_visually_complete(project_id=projectId, **{**data, **args})}, - {"key": "resources_count_by_type", - "data": dashboard.get_resources_count_by_type(project_id=projectId, **{**data, **args})} - ]} diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py index ea326c2a1..149d570ab 100644 --- a/ee/api/chalicelib/core/authorizers.py +++ b/ee/api/chalicelib/core/authorizers.py @@ -1,10 +1,10 @@ import jwt +from decouple import config from chalicelib.core import tenants from chalicelib.core import users from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ def jwt_authorizer(token): @@ -14,8 +14,8 @@ def jwt_authorizer(token): try: payload = jwt.decode( token[1], - environ["jwt_secret"], - algorithms=environ["jwt_algorithm"], + config("jwt_secret"), + algorithms=config("jwt_algorithm"), audience=[f"plugin:{helper.get_stage_name()}", 
f"front:{helper.get_stage_name()}"] ) except jwt.ExpiredSignatureError: @@ -43,14 +43,14 @@ def generate_jwt(id, tenant_id, iat, aud, exp=None): payload={ "userId": id, "tenantId": tenant_id, - "exp": iat // 1000 + int(environ["jwt_exp_delta_seconds"]) + TimeUTC.get_utc_offset() // 1000 \ - if exp is None else exp + TimeUTC.get_utc_offset() // 1000, - "iss": environ["jwt_issuer"], + "exp": iat // 1000 + int(config("jwt_exp_delta_seconds")) + TimeUTC.get_utc_offset() // 1000 \ + if exp is None else exp+ TimeUTC.get_utc_offset() // 1000, + "iss": config("jwt_issuer"), "iat": iat // 1000, "aud": aud }, - key=environ["jwt_secret"], - algorithm=environ["jwt_algorithm"] + key=config("jwt_secret"), + algorithm=config("jwt_algorithm") ) return token.decode("utf-8") diff --git a/ee/api/chalicelib/core/insights.py b/ee/api/chalicelib/core/insights.py new file mode 100644 index 000000000..387029fd4 --- /dev/null +++ b/ee/api/chalicelib/core/insights.py @@ -0,0 +1,1047 @@ +from chalicelib.core import sessions_metas +from chalicelib.utils import helper, dev +from chalicelib.utils import ch_client +from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.core.dashboard import __get_constraint_values, __complete_missing_steps +from chalicelib.core.dashboard import __get_basic_constraints, __get_meta_constraint + + +def __transform_journey(rows): + nodes = [] + links = [] + for r in rows: + source = r["source_event"][r["source_event"].index("_") + 1:] + target = r["target_event"][r["target_event"].index("_") + 1:] + if source not in nodes: + nodes.append(source) + if target not in nodes: + nodes.append(target) + links.append({"source": nodes.index(source), "target": nodes.index(target), "value": r["value"]}) + return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} + + +JOURNEY_DEPTH = 5 +JOURNEY_TYPES = { + "PAGES": {"table": "pages", "column": "url_path"}, + "CLICK": {"table": "clicks", "column": "label"}, + # "VIEW": {"table": 
"events_ios.views", "column": "name"}, TODO: enable this for SAAS only + "EVENT": {"table": "customs", "column": "name"} +} + + +@dev.timed +def journey(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], **args): + event_start = None + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + meta_condition = [] + for f in filters: + if f["type"] == "START_POINT": + event_start = f["value"] + elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append(f"sessions_metadata.project_id = %(project_id)s") + meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + meta_condition.append(f"sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000)") + extra_values["user_id"] = f["value"] + ch_sub_query = __get_basic_constraints(table_name=event_table, data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT source_event, + target_event, + count(*) AS value + FROM (SELECT toString(event_number) || '_' || value AS target_event, + lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event + FROM (SELECT session_rank, + datetime, + value, + row_number AS event_number + FROM (SELECT session_rank, + groupArray(datetime) AS arr_datetime, + groupArray(value) AS arr_value, + arrayEnumerate(arr_datetime) AS row_number + {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY datetime ) 
AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN datetime ELSE NULL END as mark" if event_start else ""} + FROM (SELECT session_id, + datetime, + value, + SUM(new_session) OVER (ORDER BY session_id, datetime) AS session_rank + FROM (SELECT *, + if(equals(source_timestamp, '1970-01-01'), 1, 0) AS new_session + FROM (SELECT session_id, + datetime, + {event_column} AS value, + lagInFrame(datetime) OVER (PARTITION BY session_id ORDER BY datetime ASC ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_timestamp + FROM {event_table} {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + ORDER BY session_id, datetime) AS related_events) AS ranked_events + ORDER BY session_rank, datetime + ) AS processed + {") AS marked) AS maxed WHERE datetime >= max) AS filtered" if event_start else ""} + GROUP BY session_rank + ORDER BY session_rank) + ARRAY JOIN + arr_datetime AS datetime, + arr_value AS value, + row_number + ORDER BY session_rank ASC, + row_number ASC) AS sorted_events + WHERE event_number <= %(JOURNEY_DEPTH)s) AS final + WHERE not empty(source_event) + AND not empty(target_event) + GROUP BY source_event, target_event + ORDER BY value DESC + LIMIT 20;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, + **__get_constraint_values(args), **extra_values} + + rows = ch.execute(query=ch_query, params=params) + # print(ch_query % params) + return __transform_journey(rows) + + +def __compute_weekly_percentage(rows): + if rows is None or len(rows) == 0: + return rows + t = -1 + for r in rows: + if r["week"] == 0: + t = r["usersCount"] + r["percentage"] = r["usersCount"] / t + return rows + + +def __complete_retention(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + for i in range(max_week): + if end_date is not None and 
start_date + i * TimeUTC.MS_WEEK >= end_date: + break + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if i < len(rows) \ + and i != rows[i]["week"]: + rows.insert(i, neutral) + elif i >= len(rows): + rows.append(neutral) + return rows + + +def __complete_acquisition(rows, start_date, end_date=None): + if rows is None: + return [] + max_week = 10 + week = 0 + delta_date = 0 + while max_week > 0: + start_date += TimeUTC.MS_WEEK + if end_date is not None and start_date >= end_date: + break + delta = 0 + if delta_date + week >= len(rows) \ + or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + rows.insert(delta_date + week + i, neutral) + delta = i + else: + for i in range(max_week): + if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: + break + + neutral = { + "firstConnexionWeek": start_date, + "week": i, + "usersCount": 0, + "connectedUsers": [], + "percentage": 0 + } + if delta_date + week + i < len(rows) \ + and i != rows[delta_date + week + i]["week"]: + rows.insert(delta_date + week + i, neutral) + elif delta_date + week + i >= len(rows): + rows.append(neutral) + delta = i + week += delta + max_week -= 1 + delta_date += 1 + return rows + + +@dev.timed +def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + 
ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + COUNT(all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess + WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + ) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + ) AS all_connexions USING (user_id) + GROUP BY connexion_week + ORDER BY connexion_week;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(ch_query % params) + rows = ch.execute(ch_query, params) + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + 
ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, + week, + users_count, + connected_users + FROM ( + SELECT first_connexion_week, + toInt8((connexion_week - first_connexion_week) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(20)(all_connexions.user_id) AS connected_users + FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM sessions_metadata AS bmsess + WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + LIMIT 1)) + GROUP BY user_id) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + ORDER BY connexion_week, user_id + ) AS all_connexions USING (user_id) + WHERE first_connexion_week <= connexion_week + GROUP BY first_connexion_week, week + ORDER BY first_connexion_week, week + ) AS full_data;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args)} + # print(ch_query % params) + rows = ch.execute(ch_query, params) + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_retention(project_id, 
startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition = __get_meta_constraint(args) + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + with ch_client.ClickHouseClient() as ch: + if default: + # get most used value + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query% params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": 
startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } + extra_values["value"] = event_value + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query += meta_condition + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT DISTINCT user_id + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) + AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) + AND isNull((SELECT 1 + FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) + WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) + AND bsess.project_id = %(project_id)s + AND bmsess.project_id = %(project_id)s + AND bmsess.user_id = sessions_metadata.user_id + AND bsess.{event_column}=%(value)s + LIMIT 1)) + ) AS users_list + INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND 
".join(ch_sub_query)} + ORDER BY connexion_week, user_id + ) AS all_connexions USING (user_id) + GROUP BY connexion_week + ORDER BY connexion_week;""" + + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(ch_query % params) + rows = ch.execute(ch_query, params) + rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) + } + + +@dev.timed +def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + startTimestamp = TimeUTC.trunc_week(startTimestamp) + endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition = __get_meta_constraint(args) + + event_type = "PAGES" + event_value = "/" + extra_values = {} + default = True + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = 
JOURNEY_TYPES[event_type]["column"] + with ch_client.ClickHouseClient() as ch: + if default: + # get most used value + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query% params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } + extra_values["value"] = event_value + + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + + ch_sub_query += meta_condition + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, + week, + users_count, + connected_users + FROM ( + SELECT first_connexion_week, + toInt8((connexion_week - first_connexion_week) / 7) AS week, + COUNT(DISTINCT all_connexions.user_id) AS users_count, + groupArray(100)(all_connexions.user_id) AS connected_users + FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week + FROM sessions_metadata INNER 
@dev.timed
def feature_popularity_frequency(project_id, startTimestamp=None, endTimestamp=None, filters=None, **args):
    """Top-7 feature values by popularity (share of distinct users) together
    with their frequency (share of total usages among the top-7).

    :param project_id: project to query.
    :param startTimestamp: window start in ms; truncated to the start of its week.
        Defaults to now-70 days, resolved at call time.
    :param endTimestamp: recomputed as start + 10 weeks.
    :param filters: EVENT_TYPE selects the feature table/column (default CLICK);
        USERID / USERID_IOS restricts to one user.
    :return: list of {"value", "popularity", "frequency"} dicts, possibly empty.
    """
    # Call-time defaults: module-level TimeUTC.now() defaults were evaluated once
    # at import; filters=[] was a shared mutable default.
    if startTimestamp is None:
        startTimestamp = TimeUTC.now(delta_days=-70)
    if endTimestamp is None:
        endTimestamp = TimeUTC.now()
    if filters is None:
        filters = []
    startTimestamp = TimeUTC.trunc_week(startTimestamp)
    endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK
    ch_sub_query = __get_basic_constraints(table_name='feature', data=args)
    meta_condition = __get_meta_constraint(args)

    event_table = JOURNEY_TYPES["CLICK"]["table"]
    event_column = JOURNEY_TYPES["CLICK"]["column"]
    extra_values = {}
    for f in filters:
        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
            event_table = JOURNEY_TYPES[f["value"]]["table"]
            event_column = JOURNEY_TYPES[f["value"]]["column"]
        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
            meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
            meta_condition.append("sessions_metadata.user_id IS NOT NULL")
            meta_condition.append("not empty(sessions_metadata.user_id)")
            meta_condition.append("sessions_metadata.project_id = %(project_id)s")
            meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
            meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
            extra_values["user_id"] = f["value"]

    with ch_client.ClickHouseClient() as ch:
        if len(meta_condition) == 0:
            meta_condition.append("sessions_metadata.user_id IS NOT NULL")
            meta_condition.append("not empty(sessions_metadata.user_id)")
            meta_condition.append("sessions_metadata.project_id = %(project_id)s")
            meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
            meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
        ch_sub_query += meta_condition
        # Denominator for popularity: distinct identified users in the window.
        ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count
                    FROM sessions_metadata
                    WHERE {" AND ".join(meta_condition)};"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
        all_user_count = ch.execute(ch_query, params)
        if len(all_user_count) == 0 or all_user_count[0]["count"] == 0:
            return []
        all_user_count = all_user_count[0]["count"]
        # length(...)>2 filters out trivial values such as "/".
        ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count
                    FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
                    WHERE {" AND ".join(ch_sub_query)}
                        AND length({event_column})>2
                    GROUP BY value
                    ORDER BY count DESC
                    LIMIT 7;"""
        popularity = ch.execute(ch_query, params)
        params["values"] = [p["value"] for p in popularity]
        if len(params["values"]) == 0:
            return []
        ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count
                    FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
                    WHERE {" AND ".join(ch_sub_query)}
                        AND {event_column} IN %(values)s
                    GROUP BY value;"""
        frequencies = ch.execute(ch_query, params)
        total_usage = sum([f["count"] for f in frequencies])
        frequencies = {f["value"]: f["count"] for f in frequencies}
        for p in popularity:
            p["popularity"] = p.pop("count") / all_user_count
            # Guard against a missing value / empty frequency result, which
            # previously raised KeyError / ZeroDivisionError.
            p["frequency"] = (frequencies.get(p["value"], 0) / total_usage) if total_usage > 0 else 0

        return popularity
all_user_count[0]["count"] + ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND length({event_column})>2 + GROUP BY value + ORDER BY count DESC + LIMIT 7;""" + + # print(ch_query % params) + # print("---------------------") + popularity = ch.execute(ch_query, params) + params["values"] = [p["value"] for p in popularity] + if len(params["values"]) == 0: + return [] + ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + AND {event_column} IN %(values)s + GROUP BY value;""" + + # print(ch_query % params) + # print("---------------------") + frequencies = ch.execute(ch_query, params) + total_usage = sum([f["count"] for f in frequencies]) + frequencies = {f["value"]: f["count"] for f in frequencies} + for p in popularity: + p["popularity"] = p.pop("count") / all_user_count + p["frequency"] = frequencies[p["value"]] / total_usage + + return popularity + + +@dev.timed +def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + meta_condition = [] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + 
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: + if default: + # get most used value + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + # print("---------------------") + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] + # else: + # print(f"no {event_table} most used value") + # return {"target": 0, "adoption": 0, + # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} + + extra_values["value"] = event_value + + if len(meta_condition) == 0: + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + meta_condition.append("sessions_metadata.user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + ch_sub_query += meta_condition + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM sessions_metadata + WHERE {" AND ".join(meta_condition)};""" + params = {"project_id": project_id, 
"startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + # print("---------------------") + all_user_count = ch.execute(ch_query, params) + if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: + return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": event_value}], } + all_user_count = all_user_count[0]["count"] + + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)};""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + # print("---------------------") + adoption = ch.execute(ch_query, params) + adoption = adoption[0]["count"] / all_user_count + return {"target": all_user_count, "adoption": adoption, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + meta_condition = [] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("user_id IS NOT NULL") + meta_condition.append("not empty(sessions_metadata.user_id)") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + 
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + ch_sub_query = __get_basic_constraints(table_name='feature', data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + + with ch_client.ClickHouseClient() as ch: + if default: + # get most used value + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return {"users": [], + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} + + extra_values["value"] = event_value + if len(meta_condition) == 0: + ch_sub_query.append("user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") + ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY user_id + ORDER BY count DESC + LIMIT 10;""" + params = 
{"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + rows = ch.execute(ch_query, params) + return {"users": helper.list_to_camel_case(rows), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), + filters=[], **args): + event_type = "CLICK" + event_value = '/' + extra_values = {} + default = True + meta_condition = [] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_type = f["value"] + elif f["type"] == "EVENT_VALUE": + event_value = f["value"] + default = False + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + + extra_values["user_id"] = f["value"] + event_table = JOURNEY_TYPES[event_type]["table"] + event_column = JOURNEY_TYPES[event_type]["column"] + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: + if default: + # get most used value + ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count + FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + AND length({event_column}) > 2 + GROUP BY value + ORDER BY count DESC + LIMIT 1;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + 
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + row = ch.execute(ch_query, params) + if len(row) > 0: + event_value = row[0]["value"] + else: + print(f"no {event_table} most used value") + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, + {"type": "EVENT_VALUE", "value": ""}], + "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) + } + extra_values["value"] = event_value + ch_sub_query.append(f"feature.{event_column} = %(value)s") + ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count + FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count + FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY day + ORDER BY day) AS raw_results;""" + params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + rows = ch.execute(ch_query, params) + return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, + density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, + neutral={"count": 0}), + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} + + +@dev.timed +def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + event_table = JOURNEY_TYPES["CLICK"]["table"] + event_column = JOURNEY_TYPES["CLICK"]["column"] + extra_values = {} + meta_condition = [] + for f in filters: + if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): + event_table = JOURNEY_TYPES[f["value"]]["table"] + event_column = JOURNEY_TYPES[f["value"]]["column"] + elif f["type"] 
in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + meta_condition.append("sessions_metadata.project_id = %(project_id)s") + meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") + meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") + extra_values["user_id"] = f["value"] + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition += __get_meta_constraint(args) + ch_sub_query += meta_condition + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg + FROM {event_table} AS feature + {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} + WHERE {" AND ".join(ch_sub_query)} + GROUP BY value + ORDER BY avg DESC + LIMIT 7;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + # print(ch_query % params) + rows = ch.execute(ch_query, params) + + return rows + + +PERIOD_TO_FUNCTION = { + "DAY": "toStartOfDay", + "WEEK": "toStartOfWeek" +} + + +@dev.timed +def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], + **args): + meta_condition = __get_meta_constraint(args) + period = "DAY" + extra_values = {} + for f in filters: + if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: + period = f["value"] + elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]: + meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") + extra_values["user_id"] = f["value"] + period_function = PERIOD_TO_FUNCTION[period] + ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") 
+ ch_sub_query.append("not empty(sessions_metadata.user_id)") + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg + FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + GROUP BY period) AS daily_users;""" + params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, + "project_id": project_id, + "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( + startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), + **extra_values} + # print(ch_query % params) + # print("---------------------") + avg = ch.execute(ch_query, params) + if len(avg) == 0 or avg[0]["avg"] == 0: + return {"avg": 0, "chart": []} + avg = avg[0]["avg"] + # TODO: optimize this when DB structure changes, optimization from 3s to 1s + ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count + FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count + FROM sessions_metadata + WHERE {" AND ".join(ch_sub_query)} + GROUP BY period + ORDER BY period) AS raw_results;""" + # print(ch_query % params) + # print("---------------------") + rows = ch.execute(ch_query, params) + return {"avg": avg, "chart": rows} + + +@dev.timed +def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): + ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") + ch_sub_query.append("not empty(sessions_metadata.user_id)") + + with ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg + FROM(SELECT 
@dev.timed
def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[],
                   **args):
    """Top-50 users "slipping away" from one feature: users who used the
    feature but whose last usage is older than a threshold, with first/last
    seen timestamps and interaction counts.

    :param project_id: project to query.
    :param startTimestamp: window start in ms (NOTE: default is evaluated once
        at import time — it does not track "now" in a long-lived process).
    :param endTimestamp: window end in ms (same import-time-default caveat).
    :param filters: list of {"type", "value"} dicts; EVENT_TYPE / EVENT_VALUE
        select the feature, USERID / USERID_IOS restricts to one user.
    :param args: extra constraint values consumed by __get_basic_constraints /
        __get_meta_constraint.
    :return: dict with "startTimestamp", the resolved "filters" and "list".
    """
    ch_sub_query = __get_basic_constraints(table_name="feature", data=args)
    event_type = "PAGES"
    event_value = "/"
    extra_values = {}
    default = True  # True => no explicit EVENT_VALUE filter; use most-used value
    meta_condition = []
    for f in filters:
        if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]):
            event_type = f["value"]
        elif f["type"] == "EVENT_VALUE":
            event_value = f["value"]
            default = False
        elif f["type"] in [sessions_metas.meta_type.USERID, sessions_metas.meta_type.USERID_IOS]:
            meta_condition.append(f"sessions_metadata.user_id = %(user_id)s")
            meta_condition.append("sessions_metadata.project_id = %(project_id)s")
            meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
            meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
            extra_values["user_id"] = f["value"]
    event_table = JOURNEY_TYPES[event_type]["table"]
    event_column = JOURNEY_TYPES[event_type]["column"]

    meta_condition += __get_meta_constraint(args)
    ch_sub_query += meta_condition
    with ch_client.ClickHouseClient() as ch:
        if default:
            # get most used value
            ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count
                        FROM {event_table} AS feature
                            {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""}
                        WHERE {" AND ".join(ch_sub_query)}
                        GROUP BY value
                        ORDER BY count DESC
                        LIMIT 1;"""
            params = {"project_id": project_id, "startTimestamp": startTimestamp,
                      "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
            print(ch_query % params)
            row = ch.execute(ch_query, params)
            if len(row) > 0:
                event_value = row[0]["value"]
            else:
                # Nothing recorded for this feature type: return an empty result.
                print(f"no {event_table} most used value")
                return {
                    "startTimestamp": startTimestamp,
                    "filters": [{"type": "EVENT_TYPE", "value": event_type},
                                {"type": "EVENT_VALUE", "value": ""}],
                    "list": []
                }
        extra_values["value"] = event_value
        if len(meta_condition) == 0:
            # No user filter supplied: still require a non-empty user_id in-window.
            ch_sub_query.append("sessions_metadata.user_id IS NOT NULL")
            ch_sub_query.append("not empty(sessions_metadata.user_id)")
            ch_sub_query.append("sessions_metadata.project_id = %(project_id)s")
            ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)")
            ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)")
        ch_sub_query.append(f"feature.{event_column} = %(value)s")
        # Inner query: per user, last usage time and distinct-session count for
        # the feature; joined back to sessions_metadata for first/last seen.
        # NOTE(review): `now() - last_time > 7` compares a datetime difference
        # to the literal 7 — presumably "7 days" was intended; confirm the unit.
        ch_query = f"""SELECT user_id,
                              toUnixTimestamp(last_time)*1000 AS last_time,
                              interactions_count,
                              toUnixTimestamp(first_seen) * 1000 AS first_seen,
                              toUnixTimestamp(last_seen) * 1000 AS last_seen
                        FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen
                              FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count
                                    FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id)
                                    WHERE {" AND ".join(ch_sub_query)}
                                    GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id)
                              WHERE now() - last_time > 7
                              GROUP BY user_id, last_time, interactions_count
                              ORDER BY interactions_count DESC, last_time DESC
                              LIMIT 50) AS raw_results;"""
        params = {"project_id": project_id, "startTimestamp": startTimestamp,
                  "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values}
        print(ch_query % params)
        rows = ch.execute(ch_query, params)
        return {
            "startTimestamp": startTimestamp,
            "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}],
            "list": helper.list_to_camel_case(rows)
        }
session_id) AS interactions_count + FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) + WHERE {" AND ".join(ch_sub_query)} + GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) + WHERE now() - last_time > 7 + GROUP BY user_id, last_time, interactions_count + ORDER BY interactions_count DESC, last_time DESC + LIMIT 50) AS raw_results;""" + params = {"project_id": project_id, "startTimestamp": startTimestamp, + "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} + print(ch_query % params) + rows = ch.execute(ch_query, params) + return { + "startTimestamp": startTimestamp, + "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], + "list": helper.list_to_camel_case(rows) + } + + +@dev.timed +def search(text, feature_type, project_id, platform=None): + if not feature_type: + resource_type = "ALL" + data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) + return data + args = {} if platform is None else {"platform": platform} + ch_sub_query = __get_basic_constraints(table_name="feature", data=args) + meta_condition = __get_meta_constraint(args) + ch_sub_query += meta_condition + params = {"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, + "endTimestamp": TimeUTC.now(), + "project_id": project_id, + "value": text.lower(), + "platform_0": platform} + if feature_type == "ALL": + with ch_client.ClickHouseClient() as ch: + sub_queries = [] + for e in JOURNEY_TYPES: + sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" + FROM {JOURNEY_TYPES[e]["table"]} AS feature + WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 + LIMIT 10)""") + ch_query = "UNION ALL".join(sub_queries) + print(ch_query % params) + rows = ch.execute(ch_query, params) + elif JOURNEY_TYPES.get(feature_type) is not None: + with 
ch_client.ClickHouseClient() as ch: + ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" + FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature + WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 + LIMIT 10;""" + print(ch_query % params) + rows = ch.execute(ch_query, params) + else: + return [] + return [helper.dict_to_camel_case(row) for row in rows] \ No newline at end of file diff --git a/ee/api/chalicelib/core/license.py b/ee/api/chalicelib/core/license.py index 905c4f2ec..2423567de 100644 --- a/ee/api/chalicelib/core/license.py +++ b/ee/api/chalicelib/core/license.py @@ -1,6 +1,7 @@ -from chalicelib.utils.helper import environ -from chalicelib.utils import pg_client +from decouple import config + from chalicelib.core import unlock +from chalicelib.utils import pg_client def get_status(tenant_id): @@ -16,7 +17,7 @@ def get_status(tenant_id): "versionNumber": r.get("version_number", ""), "license": license[0:2] + "*" * (len(license) - 4) + license[-2:], "expirationDate": unlock.get_expiration_date(), - "teamMember": int(environ.get("numberOfSeats", 0)) + "teamMember": config("numberOfSeats", cast=int, default=0) }, "count": { "teamMember": r.get("t_users"), diff --git a/ee/api/chalicelib/core/metadata.py b/ee/api/chalicelib/core/metadata.py index 293a8cd4c..a308a9c7a 100644 --- a/ee/api/chalicelib/core/metadata.py +++ b/ee/api/chalicelib/core/metadata.py @@ -91,7 +91,8 @@ def delete(tenant_id, project_id, index: int): cur.execute(query=query) query = cur.mogrify(f"""UPDATE public.sessions SET {colname}= NULL - WHERE project_id = %(project_id)s""", + WHERE project_id = %(project_id)s + AND {colname} IS NOT NULL""", {"project_id": project_id}) cur.execute(query=query) diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index cb1e7b1de..915502700 100644 --- a/ee/api/chalicelib/core/projects.py +++ 
b/ee/api/chalicelib/core/projects.py @@ -106,10 +106,11 @@ def is_authorized(project_id, tenant_id): return get_project(tenant_id=tenant_id, project_id=project_id) is not None -def create(tenant_id, user_id, data): - admin = users.get(user_id=user_id, tenant_id=tenant_id) - if not admin["admin"] and not admin["superAdmin"]: - return {"errors": ["unauthorized"]} +def create(tenant_id, user_id, data, skip_authorization=False): + if not skip_authorization: + admin = users.get(user_id=user_id, tenant_id=tenant_id) + if not admin["admin"] and not admin["superAdmin"]: + return {"errors": ["unauthorized"]} return {"data": __create(tenant_id=tenant_id, name=data.get("name", "my first project"))} @@ -228,3 +229,25 @@ def update_capture_status(project_id, changes): ) return changes + + +def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None): + with pg_client.PostgresClient() as cur: + query = cur.mogrify(f"""\ + SELECT + s.project_key, + s.name + {",(SELECT max(ss.start_ts) FROM public.sessions AS ss WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at" if include_last_session else ""} + {',s.gdpr' if include_gdpr else ''} + FROM public.projects AS s + where s.project_key =%(project_key)s + AND s.tenant_id =%(tenant_id)s + AND s.deleted_at IS NULL + LIMIT 1;""", + {"project_key": project_key, "tenant_id": tenant_id}) + + cur.execute( + query=query + ) + row = cur.fetchone() + return helper.dict_to_camel_case(row) diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index e51816e85..d09db8c50 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -1,26 +1,27 @@ +import schemas from chalicelib.core import users from chalicelib.utils import email_helper, captcha, helper -def reset(data): +def reset(data: schemas.ForgetPasswordPayloadSchema): print("====================== reset password ===============") print(data) - if 
helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): print("error: Invalid captcha.") return {"errors": ["Invalid captcha."]} if "email" not in data: return {"errors": ["email not found in body"]} if not helper.has_smtp(): return {"errors": ["no SMTP configuration found, you can ask your admin to reset your password"]} - a_user = users.get_by_email_only(data["email"]) + a_user = users.get_by_email_only(data.email) if a_user is not None: # ---FOR SSO if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False: return {"errors": ["Please use your SSO to login"]} # ---------- invitation_link = users.generate_new_invitation(user_id=a_user["id"]) - email_helper.send_forgot_password(recipient=data["email"], invitation_link=invitation_link) + email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) else: - print(f"invalid email address [{data['email']}]") + print(f"invalid email address [{data.email}]") return {"errors": ["invalid email address"]} return {"data": {"state": "success"}} diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 4650736a5..aa4ba2af9 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -1,21 +1,24 @@ -from chalicelib.utils import helper -from chalicelib.utils import pg_client +import json + +from decouple import config + +import schemas from chalicelib.core import users, telemetry, tenants from chalicelib.utils import captcha -import json +from chalicelib.utils import helper +from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ -def create_step1(data): +def create_step1(data: schemas.UserSignupSchema): print(f"===================== SIGNUP STEP 1 AT {TimeUTC.to_human_readable(TimeUTC.now())} UTC") errors = [] if tenants.tenants_exists(): - return 
{"errors":["tenants already registered"]} + return {"errors": ["tenants already registered"]} - email = data.get("email") + email = data.email print(f"=====================> {email}") - password = data.get("password") + password = data.password print("Verifying email validity") if email is None or len(email) < 5 or not helper.is_valid_email(email): @@ -28,25 +31,25 @@ def create_step1(data): errors.append("Email address previously deleted.") print("Verifying captcha") - if helper.allow_captcha() and not captcha.is_valid(data["g-recaptcha-response"]): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): errors.append("Invalid captcha.") print("Verifying password validity") - if len(data["password"]) < 6: + if len(password) < 6: errors.append("Password is too short, it must be at least 6 characters long.") print("Verifying fullname validity") - fullname = data.get("fullname") + fullname = data.fullname if fullname is None or len(fullname) < 1 or not helper.is_alphabet_space_dash(fullname): errors.append("Invalid full name.") print("Verifying company's name validity") - company_name = data.get("organizationName") + company_name = data.organizationName if company_name is None or len(company_name) < 1 or not helper.is_alphanumeric_space(company_name): errors.append("invalid organization's name") print("Verifying project's name validity") - project_name = data.get("projectName") + project_name = data.projectName if project_name is None or len(project_name) < 1: project_name = "my first project" @@ -60,7 +63,7 @@ def create_step1(data): params = {"email": email, "password": password, "fullname": fullname, "companyName": company_name, "projectName": project_name, - "versionNumber": environ["version_number"], + "versionNumber": config("version_number"), "data": json.dumps({"lastAnnouncementView": TimeUTC.now()})} query = """\ WITH t AS ( @@ -131,4 +134,4 @@ def create_step1(data): "user": r, "client": c, } - } \ No newline at end of file + } diff 
--git a/ee/api/chalicelib/core/tenants.py b/ee/api/chalicelib/core/tenants.py index eb827d827..45491f654 100644 --- a/ee/api/chalicelib/core/tenants.py +++ b/ee/api/chalicelib/core/tenants.py @@ -1,6 +1,6 @@ -from chalicelib.utils import pg_client -from chalicelib.utils import helper from chalicelib.core import users +from chalicelib.utils import helper +from chalicelib.utils import pg_client def get_by_tenant_key(tenant_key): diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py new file mode 100644 index 000000000..fd0ae6c2b --- /dev/null +++ b/ee/api/chalicelib/core/traces.py @@ -0,0 +1,157 @@ +import json +import queue +import re +from typing import Optional, List + +from decouple import config +from fastapi import Request, Response +from pydantic import BaseModel, Field +from starlette.background import BackgroundTask + +import app as main_app +from chalicelib.utils import pg_client +from chalicelib.utils.TimeUTC import TimeUTC +from schemas import CurrentContext + +IGNORE_ROUTES = [ + {"method": ["*"], "path": "/notifications"}, + {"method": ["*"], "path": "/announcements"}, + {"method": ["*"], "path": "/client"}, + {"method": ["*"], "path": "/account"}, + {"method": ["GET"], "path": "/projects"}, + {"method": ["*"], "path": "/{projectId}/sessions/search2"}, + {"method": ["GET"], "path": "/{projectId}/sessions2/favorite"}, + {"method": ["GET"], "path": re.compile("^/{projectId}/sessions2/{sessionId}/.*")}, + {"method": ["GET"], "path": "/{projectId}/sample_rate"}, + {"method": ["GET"], "path": "/boarding"}, + {"method": ["GET"], "path": "/{projectId}/metadata"}, + {"method": ["GET"], "path": "/{projectId}/integration/sources"}, + {"method": ["GET"], "path": "/{projectId}/funnels"}, + {"method": ["GET"], "path": "/integrations/slack/channels"}, + {"method": ["GET"], "path": "/webhooks"}, + {"method": ["GET"], "path": "/{projectId}/alerts"}, + {"method": ["GET"], "path": "/client/members"}, + {"method": ["GET"], "path": 
"/client/roles"}, + {"method": ["GET"], "path": "/announcements/view"}, + {"method": ["GET"], "path": "/config/weekly_report"}, + {"method": ["GET"], "path": "/{projectId}/events/search"}, + {"method": ["POST"], "path": "/{projectId}/errors/search"}, + {"method": ["GET"], "path": "/{projectId}/errors/stats"}, + {"method": ["GET"], "path": re.compile("^/{projectId}/errors/{errorId}/.*")}, + {"method": ["GET"], "path": re.compile("^/integrations/.*")}, + {"method": ["*"], "path": re.compile("^/{projectId}/dashboard/.*")}, + {"method": ["*"], "path": re.compile("^/{projectId}/funnels$")}, + {"method": ["*"], "path": re.compile("^/{projectId}/funnels/.*")}, +] +IGNORE_IN_PAYLOAD = ["token", "password", "authorizationToken", "authHeader", "xQueryKey", "awsSecretAccessKey", + "serviceAccountCredentials", "accessKey", "applicationKey", "apiKey"] + + +class TraceSchema(BaseModel): + user_id: Optional[int] = Field(None) + tenant_id: int = Field(...) + auth: Optional[str] = Field(None) + action: str = Field(...) + method: str = Field(...) + path_format: str = Field(...) + endpoint: str = Field(...) + payload: Optional[dict] = Field(None) + parameters: Optional[dict] = Field(None) + status: Optional[int] = Field(None) + created_at: int = Field(...) 
+ + +def __process_trace(trace: TraceSchema): + data = trace.dict() + data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len( + trace.parameters.keys()) > 0 else None + data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None + return data + + +async def write_trace(trace: TraceSchema): + data = __process_trace(trace) + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + f"""INSERT INTO traces(user_id, tenant_id, created_at, auth, action, method, path_format, endpoint, payload, parameters, status) + VALUES (%(user_id)s, %(tenant_id)s, %(created_at)s, %(auth)s, %(action)s, %(method)s, %(path_format)s, %(endpoint)s, %(payload)s::jsonb, %(parameters)s::jsonb, %(status)s);""", + data) + ) + + +async def write_traces_batch(traces: List[TraceSchema]): + if len(traces) == 0: + return + params = {} + values = [] + for i, t in enumerate(traces): + data = __process_trace(t) + for key in data.keys(): + params[f"{key}_{i}"] = data[key] + values.append( + f"(%(user_id_{i})s, %(tenant_id_{i})s, %(created_at_{i})s, %(auth_{i})s, %(action_{i})s, %(method_{i})s, %(path_format_{i})s, %(endpoint_{i})s, %(payload_{i})s::jsonb, %(parameters_{i})s::jsonb, %(status_{i})s)") + + with pg_client.PostgresClient() as cur: + cur.execute( + cur.mogrify( + f"""INSERT INTO traces(user_id, tenant_id, created_at, auth, action, method, path_format, endpoint, payload, parameters, status) + VALUES {" , ".join(values)};""", + params) + ) + + +async def process_trace(action: str, path_format: str, request: Request, response: Response): + if not hasattr(request.state, "currentContext"): + return + current_context: CurrentContext = request.state.currentContext + body: json = None + if request.method in ["POST", "PUT", "DELETE"]: + body = await request.json() + intersect = list(set(body.keys()) & set(IGNORE_IN_PAYLOAD)) + for attribute in intersect: + body[attribute] = "HIDDEN" + current_trace 
= TraceSchema(tenant_id=current_context.tenant_id, + user_id=current_context.user_id if isinstance(current_context, CurrentContext) \ + else None, + auth="jwt" if isinstance(current_context, CurrentContext) else "apiKey", + action=action, + endpoint=str(request.url.path), method=request.method, + payload=body, + parameters=dict(request.query_params), + status=response.status_code, + path_format=path_format, + created_at=TimeUTC.now()) + if not hasattr(main_app.app, "queue_system"): + main_app.app.queue_system = queue.Queue() + q: queue.Queue = main_app.app.queue_system + q.put(current_trace) + + +def trace(action: str, path_format: str, request: Request, response: Response): + for p in IGNORE_ROUTES: + if (isinstance(p["path"], str) and p["path"] == path_format \ + or isinstance(p["path"], re.Pattern) and re.search(p["path"], path_format)) \ + and (p["method"][0] == "*" or request.method in p["method"]): + return + background_task: BackgroundTask = BackgroundTask(process_trace, action, path_format, request, response) + if response.background is None: + response.background = background_task + else: + response.background.add_task(background_task.func, *background_task.args, *background_task.kwargs) + + +async def process_traces_queue(): + queue_system: queue.Queue = main_app.app.queue_system + traces = [] + while not queue_system.empty(): + obj = queue_system.get_nowait() + traces.append(obj) + if len(traces) > 0: + await write_traces_batch(traces) + + +cron_jobs = [ + {"func": process_traces_queue, "trigger": "interval", "seconds": config("traces_period", cast=int, default=60), + "misfire_grace_time": 20} +] diff --git a/ee/api/chalicelib/core/unlock.py b/ee/api/chalicelib/core/unlock.py index f4d5da19c..d656edf8a 100644 --- a/ee/api/chalicelib/core/unlock.py +++ b/ee/api/chalicelib/core/unlock.py @@ -1,7 +1,10 @@ -from chalicelib.utils.helper import environ -from chalicelib.utils.TimeUTC import TimeUTC -import requests import uuid +from os import environ + +import 
requests +from decouple import config + +from chalicelib.utils.TimeUTC import TimeUTC def __get_mid(): @@ -9,7 +12,7 @@ def __get_mid(): def get_license(): - return environ.get("LICENSE_KEY", "") + return config("LICENSE_KEY", default="") def check(): @@ -33,10 +36,10 @@ def check(): def get_expiration_date(): - return int(environ.get("expiration", 0)) + return config("expiration", default=0, cast=int) def is_valid(): - if environ.get("lastCheck") is None: + if config("lastCheck", default=None) is None or (get_expiration_date() - TimeUTC.now()) <= 0: check() return get_expiration_date() - TimeUTC.now() > 0 diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index f82abd191..7838a68ac 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -1,13 +1,15 @@ import json import secrets -from chalicelib.core import authorizers, metadata, projects, assist -from chalicelib.core import tenants +from decouple import config +from fastapi import BackgroundTasks + +from chalicelib.core import authorizers, metadata, projects, roles +from chalicelib.core import tenants, assist from chalicelib.utils import dev, SAML2_helper -from chalicelib.utils import helper +from chalicelib.utils import helper, email_helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC -from chalicelib.utils.helper import environ def __generate_invitation_token(): @@ -20,7 +22,7 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal WITH u AS ( INSERT INTO public.users (tenant_id, email, role, name, data, role_id) VALUES (%(tenantId)s, %(email)s, %(role)s, %(name)s, %(data)s, %(role_id)s) - RETURNING user_id,email,role,name,appearance, role_id + RETURNING tenant_id,user_id,email,role,name,appearance, role_id ), au AS (INSERT INTO public.basic_authentication (user_id, generated_password, invitation_token, invited_at) VALUES ((SELECT user_id FROM u), TRUE, %(invitation_token)s, 
timezone('utc'::text, now())) @@ -36,8 +38,11 @@ def create_new_member(tenant_id, email, invitation_token, admin, name, owner=Fal (CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, (CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member, au.invitation_token, - u.role_id - FROM u,au;""", + u.role_id, + roles.name AS role_name, + roles.permissions, + TRUE AS has_password + FROM au,u LEFT JOIN roles USING(tenant_id) WHERE roles.role_id IS NULL OR roles.role_id = %(role_id)s;""", {"tenantId": tenant_id, "email": email, "role": "owner" if owner else "admin" if admin else "member", "name": name, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), @@ -190,7 +195,7 @@ def update(tenant_id, user_id, changes): return helper.dict_to_camel_case(cur.fetchone()) -def create_member(tenant_id, user_id, data): +def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks): admin = get(tenant_id=tenant_id, user_id=user_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} @@ -205,6 +210,8 @@ def create_member(tenant_id, user_id, data): if name is None: name = data["email"] role_id = data.get("roleId") + if role_id is None: + role_id = roles.get_role_by_name(tenant_id=tenant_id, name="member").get("roleId") invitation_token = __generate_invitation_token() user = get_deleted_user_by_email(email=data["email"]) if user is not None: @@ -214,18 +221,24 @@ def create_member(tenant_id, user_id, data): new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, admin=data.get("admin", False), name=name, role_id=role_id) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) - helper.async_post(environ['email_basic'] % 'member_invitation', - { - "email": data["email"], - "invitationLink": new_member["invitationLink"], - "clientId": tenants.get_by_tenant_id(tenant_id)["name"], - "senderName": admin["name"] - }) + # 
helper.async_post(config('email_basic') % 'member_invitation', + # { + # "email": data["email"], + # "invitationLink": new_member["invitationLink"], + # "clientId": tenants.get_by_tenant_id(tenant_id)["name"], + # "senderName": admin["name"] + # }) + background_tasks.add_task(email_helper.send_team_invitation, **{ + "recipient": data["email"], + "invitation_link": new_member["invitationLink"], + "client_id": tenants.get_by_tenant_id(tenant_id)["name"], + "sender_name": admin["name"] + }) return {"data": new_member} def __get_invitation_link(invitation_token): - return environ["SITE_URL"] + environ["invitation_link"] % invitation_token + return config("SITE_URL") + config("invitation_link") % invitation_token def allow_password_change(user_id, delta_min=10): @@ -299,12 +312,15 @@ def edit(user_id_to_update, tenant_id, changes, editor_id): admin = get(tenant_id=tenant_id, user_id=editor_id) if not admin["superAdmin"] and not admin["admin"]: return {"errors": ["unauthorized"]} - if user["superAdmin"] and "admin" in changes: - changes.pop("admin") + if editor_id == user_id_to_update: + if user["superAdmin"]: + changes.pop("admin") + elif user["admin"] != changes["admin"]: + return {"errors": ["cannot change your own role"]} keys = list(changes.keys()) for k in keys: - if k not in ALLOW_EDIT: + if k not in ALLOW_EDIT or changes[k] is None: changes.pop(k) keys = list(changes.keys()) diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py index 20e873f5c..cb7cf509e 100644 --- a/ee/api/chalicelib/core/webhook.py +++ b/ee/api/chalicelib/core/webhook.py @@ -1,6 +1,7 @@ +import requests + from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC -import requests def get_by_id(webhook_id): @@ -121,7 +122,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", def add_edit(tenant_id, data, replace_none=None): - if "webhookId" in data: + if data.get("webhookId") is not None: return 
update(tenant_id=tenant_id, webhook_id=data["webhookId"], changes={"endpoint": data["endpoint"], "authHeader": None if "authHeader" not in data else data["authHeader"], diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index 25f279d3a..a2a4e1e6e 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -1,21 +1,22 @@ from http import cookies -from urllib.parse import urlparse, parse_qsl +from urllib.parse import urlparse +from decouple import config +from fastapi import Request from onelogin.saml2.auth import OneLogin_Saml2_Auth - -from chalicelib.utils.helper import environ +from starlette.datastructures import FormData SAML2 = { "strict": True, "debug": True, "sp": { - "entityId": environ["SITE_URL"] + "/api/sso/saml2/metadata/", + "entityId": config("SITE_URL") + "/api/sso/saml2/metadata/", "assertionConsumerService": { - "url": environ["SITE_URL"] + "/api/sso/saml2/acs", + "url": config("SITE_URL") + "/api/sso/saml2/acs", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, "singleLogoutService": { - "url": environ["SITE_URL"] + "/api/sso/saml2/sls", + "url": config("SITE_URL") + "/api/sso/saml2/sls", "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, "NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", @@ -26,28 +27,28 @@ SAML2 = { } idp = None # SAML2 config handler -if environ.get("SAML2_MD_URL") is not None and len(environ["SAML2_MD_URL"]) > 0: +if config("SAML2_MD_URL", default=None) is not None and len(config("SAML2_MD_URL")) > 0: print("SAML2_MD_URL provided, getting IdP metadata config") from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser - idp_data = OneLogin_Saml2_IdPMetadataParser.parse_remote(environ.get("SAML2_MD_URL")) + idp_data = OneLogin_Saml2_IdPMetadataParser.parse_remote(config("SAML2_MD_URL", default=None)) idp = idp_data.get("idp") if SAML2["idp"] is None: - if 
len(environ.get("idp_entityId", "")) > 0 \ - and len(environ.get("idp_sso_url", "")) > 0 \ - and len(environ.get("idp_x509cert", "")) > 0: + if len(config("idp_entityId", default="")) > 0 \ + and len(config("idp_sso_url", default="")) > 0 \ + and len(config("idp_x509cert", default="")) > 0: idp = { - "entityId": environ["idp_entityId"], + "entityId": config("idp_entityId"), "singleSignOnService": { - "url": environ["idp_sso_url"], + "url": config("idp_sso_url"), "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, - "x509cert": environ["idp_x509cert"] + "x509cert": config("idp_x509cert") } - if len(environ.get("idp_sls_url", "")) > 0: + if len(config("idp_sls_url", default="")) > 0: idp["singleLogoutService"] = { - "url": environ["idp_sls_url"], + "url": config("idp_sls_url"), "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" } @@ -67,10 +68,10 @@ def init_saml_auth(req): return auth -def prepare_request(request): +async def prepare_request(request: Request): request.args = dict(request.query_params).copy() if request.query_params else {} - request.form = dict(request.json_body).copy() if request.json_body else dict( - parse_qsl(request.raw_body.decode())) if request.raw_body else {} + form: FormData = await request.form() + request.form = dict(form) cookie_str = request.headers.get("cookie", "") if "session" in cookie_str: cookie = cookies.SimpleCookie() @@ -90,7 +91,7 @@ def prepare_request(request): 'https': 'on' if request.headers.get('x-forwarded-proto', 'http') == 'https' else 'off', 'http_host': request.headers['host'], 'server_port': url_data.port, - 'script_name': "/api" + request.path, + 'script_name': "/api" + request.url.path, 'get_data': request.args.copy(), # Uncomment if using ADFS as IdP, https://github.com/onelogin/python-saml/pull/144 # 'lowercase_urlencoding': True, @@ -105,9 +106,9 @@ def is_saml2_available(): def get_saml2_provider(): - return environ.get("idp_name", "saml2") if is_saml2_available() and len( - 
environ.get("idp_name", "saml2")) > 0 else None + return config("idp_name", default="saml2") if is_saml2_available() and len( + config("idp_name", default="saml2")) > 0 else None def get_landing_URL(jwt): - return environ["SITE_URL"] + environ.get("sso_landing", "/login?jwt=%s") % jwt + return config("SITE_URL") + config("sso_landing", default="/login?jwt=%s") % jwt diff --git a/ee/api/chalicelib/utils/assist_helper.py b/ee/api/chalicelib/utils/assist_helper.py index d31cadd1f..d026aa106 100644 --- a/ee/api/chalicelib/utils/assist_helper.py +++ b/ee/api/chalicelib/utils/assist_helper.py @@ -3,14 +3,15 @@ import hashlib import hmac from time import time +from decouple import config + from chalicelib.core import assist from chalicelib.utils import helper -from chalicelib.utils.helper import environ def __get_secret(): - return environ["assist_secret"] if environ["assist_secret"] is not None and len( - environ["assist_secret"]) > 0 else None + return config("assist_secret") if config("assist_secret", default=None) is not None \ + and len(config("assist_secret")) > 0 else None def get_temporary_credentials(): @@ -18,7 +19,7 @@ def get_temporary_credentials(): if secret is None: return {"errors": ["secret not defined"]} user = helper.generate_salt() - ttl = int(environ.get("assist_ttl", 48)) * 3600 + ttl = config("assist_ttl", cast=int, default=48) * 3600 timestamp = int(time()) + ttl username = str(timestamp) + ':' + user dig = hmac.new(bytes(secret, 'utf-8'), bytes(username, 'utf-8'), hashlib.sha1) diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index cfe635b4a..babdd669a 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -1,14 +1,14 @@ import clickhouse_driver -from chalicelib.utils.helper import environ +from decouple import config class ClickHouseClient: __client = None def __init__(self): - self.__client = clickhouse_driver.Client(host=environ["ch_host"], + self.__client = 
clickhouse_driver.Client(host=config("ch_host"), database="default", - port=int(environ["ch_port"])) \ + port=config("ch_port", cast=int)) \ if self.__client is None else self.__client def __enter__(self): diff --git a/ee/api/or_dependencies.py b/ee/api/or_dependencies.py new file mode 100644 index 000000000..2c12da469 --- /dev/null +++ b/ee/api/or_dependencies.py @@ -0,0 +1,45 @@ +import json +from typing import Callable + +from fastapi.routing import APIRoute +from starlette import status +from starlette.exceptions import HTTPException +from starlette.requests import Request +from starlette.responses import Response, JSONResponse + +import schemas +from chalicelib.core import traces + + +async def OR_context(request: Request) -> schemas.CurrentContext: + if hasattr(request.state, "currentContext"): + return request.state.currentContext + else: + raise Exception("currentContext not found") + + +class ORRoute(APIRoute): + def get_route_handler(self) -> Callable: + original_route_handler = super().get_route_handler() + + async def custom_route_handler(request: Request) -> Response: + try: + response: Response = await original_route_handler(request) + except HTTPException as e: + if e.status_code // 100 == 4: + return JSONResponse(content={"errors": [e.detail]}, status_code=e.status_code) + else: + raise e + + if isinstance(response, JSONResponse): + response: JSONResponse = response + body = json.loads(response.body.decode('utf8')) + if response.status_code == 200 and body is not None and body.get("errors") is not None: + if "not found" in body["errors"][0]: + response.status_code = status.HTTP_404_NOT_FOUND + else: + response.status_code = status.HTTP_400_BAD_REQUEST + traces.trace(action=self.name, path_format=self.path_format, request=request, response=response) + return response + + return custom_route_handler diff --git a/ee/api/prepare-local.sh b/ee/api/prepare-local.sh new file mode 100755 index 000000000..c0a3db182 --- /dev/null +++ b/ee/api/prepare-local.sh 
@@ -0,0 +1,2 @@ +#!/bin/bash +rsync -avr --exclude=".*" --ignore-existing ../../api/* ./ \ No newline at end of file diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index e241b5edd..82329a1f2 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -3,11 +3,14 @@ urllib3==1.26.6 boto3==1.16.1 pyjwt==1.7.1 psycopg2-binary==2.8.6 -pytz==2020.1 -sentry-sdk==0.19.1 elasticsearch==7.9.1 jira==2.0.0 -schedule==1.1.0 -croniter==1.0.12 -clickhouse-driver==0.1.5 -python3-saml==1.12.0 \ No newline at end of file +clickhouse-driver==0.2.2 +python3-saml==1.12.0 + +fastapi==0.70.1 +python-multipart==0.0.5 +uvicorn[standard]==0.16.0 +python-decouple==3.5 +pydantic[email]==1.8.2 +apscheduler==3.8.1 \ No newline at end of file diff --git a/ee/api/routers/app/v1_api_ee.py b/ee/api/routers/app/v1_api_ee.py new file mode 100644 index 000000000..3ca55d3e9 --- /dev/null +++ b/ee/api/routers/app/v1_api_ee.py @@ -0,0 +1,12 @@ +from chalicelib.utils import assist_helper +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@app_apikey.get('/v1/assist/credentials', tags=["api"]) +def get_assist_credentials(): + credentials = assist_helper.get_temporary_credentials() + if "errors" in credentials: + return credentials + return {"data": credentials} diff --git a/ee/api/routers/base.py b/ee/api/routers/base.py new file mode 100644 index 000000000..5c665b2d1 --- /dev/null +++ b/ee/api/routers/base.py @@ -0,0 +1,14 @@ +from fastapi import APIRouter, Depends + +from auth.auth_apikey import APIKeyAuth +from auth.auth_jwt import JWTAuth +from auth.auth_project import ProjectAuthorizer +from or_dependencies import ORRoute + + +def get_routers() -> (APIRouter, APIRouter, APIRouter): + public_app = APIRouter(route_class=ORRoute) + app = APIRouter(dependencies=[Depends(JWTAuth()), Depends(ProjectAuthorizer("projectId"))], route_class=ORRoute) + app_apikey = APIRouter(dependencies=[Depends(APIKeyAuth()), 
Depends(ProjectAuthorizer("projectKey"))], + route_class=ORRoute) + return public_app, app, app_apikey diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py new file mode 100644 index 000000000..a882c091b --- /dev/null +++ b/ee/api/routers/core_dynamic.py @@ -0,0 +1,222 @@ +from typing import Optional + +from decouple import config +from fastapi import Body, Depends, HTTPException, status, BackgroundTasks +from starlette.responses import RedirectResponse + +import schemas +import schemas_ee +from chalicelib.core import integrations_manager +from chalicelib.core import sessions +from chalicelib.core import tenants, users, metadata, projects, license, alerts, assist +from chalicelib.core import webhook +from chalicelib.core.collaboration_slack import Slack +from chalicelib.utils import captcha, SAML2_helper +from chalicelib.utils import helper +from or_dependencies import OR_context +from routers.base import get_routers + +public_app, app, app_apikey = get_routers() + + +@public_app.get('/signup', tags=['signup']) +def get_all_signup(): + return {"data": {"tenants": tenants.tenants_exists(), + "sso": SAML2_helper.is_saml2_available(), + "ssoProvider": SAML2_helper.get_saml2_provider(), + "edition": helper.get_edition()}} + + +@public_app.post('/login', tags=["authentication"]) +def login(data: schemas.UserLoginSchema = Body(...)): + if helper.allow_captcha() and not captcha.is_valid(data.g_recaptcha_response): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid captcha." + ) + + r = users.authenticate(data.email, data.password, for_plugin=False) + if r is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="You’ve entered invalid Email or Password." 
+ ) + + tenant_id = r.pop("tenantId") + + r["limits"] = { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(tenant_id)} + + c = tenants.get_by_tenant_id(tenant_id) + c.pop("createdAt") + c["projects"] = projects.get_projects(tenant_id=tenant_id, recording_state=True, recorded=True, + stack_integrations=True, version=True) + c["smtp"] = helper.has_smtp() + c["iceServers"]: assist.get_ice_servers() + return { + 'jwt': r.pop('jwt'), + 'data': { + "user": r, + "client": c + } + } + + +@app.get('/account', tags=['accounts']) +def get_account(context: schemas.CurrentContext = Depends(OR_context)): + r = users.get(tenant_id=context.tenant_id, user_id=context.user_id) + return { + 'data': { + **r, + "limits": { + "teamMember": -1, + "projects": -1, + "metadata": metadata.get_remaining_metadata_with_count(context.tenant_id) + }, + **license.get_status(context.tenant_id), + "smtp": helper.has_smtp(), + "saml2": SAML2_helper.is_saml2_available(), + "iceServers": assist.get_ice_servers() + } + } + + +@app.get('/projects/limit', tags=['projects']) +def get_projects_limit(context: schemas.CurrentContext = Depends(OR_context)): + return {"data": { + "current": projects.count_by_tenant(tenant_id=context.tenant_id), + "remaining": -1 + }} + + +@app.get('/projects/{projectId}', tags=['projects']) +def get_project(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = projects.get_project(tenant_id=context.tenant_id, project_id=projectId, include_last_session=True, + include_gdpr=True) + if data is None: + return {"errors": ["project not found"]} + return {"data": data} + + +@app.put('/integrations/slack', tags=['integrations']) +@app.post('/integrations/slack', tags=['integrations']) +def add_slack_client(data: schemas.AddSlackSchema, context: schemas.CurrentContext = Depends(OR_context)): + n = Slack.add_channel(tenant_id=context.tenant_id, url=data.url, name=data.name) + if n is None: + return { + "errors": 
["We couldn't send you a test message on your Slack channel. Please verify your webhook url."] + } + return {"data": n} + + +@app.put('/integrations/slack/{integrationId}', tags=['integrations']) +@app.post('/integrations/slack/{integrationId}', tags=['integrations']) +def edit_slack_integration(integrationId: int, data: schemas.EditSlackSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + if len(data.url) > 0: + old = webhook.get(tenant_id=context.tenant_id, webhook_id=integrationId) + if old["endpoint"] != data.url: + if not Slack.say_hello(data.url): + return { + "errors": [ + "We couldn't send you a test message on your Slack channel. Please verify your webhook url."] + } + return {"data": webhook.update(tenant_id=context.tenant_id, webhook_id=integrationId, + changes={"name": data.name, "endpoint": data.url})} + + +# this endpoint supports both jira & github based on `provider` attribute +@app.post('/integrations/issues', tags=["integrations"]) +def add_edit_jira_cloud_github(data: schemas.JiraGithubSchema, + context: schemas.CurrentContext = Depends(OR_context)): + provider = data.provider.upper() + error, integration = integrations_manager.get_integration(tool=provider, tenant_id=context.tenant_id, + user_id=context.user_id) + if error is not None: + return error + return {"data": integration.add_edit(data=data.dict())} + + +@app.post('/client/members', tags=["client"]) +@app.put('/client/members', tags=["client"]) +def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), + background_tasks=background_tasks) + + +@public_app.get('/users/invitation', tags=['users']) +def process_invitation_link(token: str): + if token is None or len(token) < 64: + return {"errors": ["please provide a valid invitation"]} + user = 
users.get_by_invitation_token(token) + if user is None: + return {"errors": ["invitation not found"]} + if user["expiredInvitation"]: + return {"errors": ["expired invitation, please ask your admin to send a new one"]} + if user["expiredChange"] is not None and not user["expiredChange"] \ + and user["changePwdToken"] is not None and user["changePwdAge"] < -5 * 60: + pass_token = user["changePwdToken"] + else: + pass_token = users.allow_password_change(user_id=user["userId"]) + return RedirectResponse(url=config("SITE_URL") + config("change_password_link") % (token, pass_token)) + + +@public_app.post('/password/reset', tags=["users"]) +@public_app.put('/password/reset', tags=["users"]) +def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = Body(...)): + if data is None or len(data.invitation) < 64 or len(data.passphrase) < 8: + return {"errors": ["please provide a valid invitation & pass"]} + user = users.get_by_invitation_token(token=data.invitation, pass_token=data.passphrase) + if user is None: + return {"errors": ["invitation not found"]} + if user["expiredChange"]: + return {"errors": ["expired change, please re-use the invitation link"]} + + return users.set_password_invitation(new_password=data.password, user_id=user["userId"], tenant_id=user["tenantId"]) + + +@app.put('/client/members/{memberId}', tags=["client"]) +@app.post('/client/members/{memberId}', tags=["client"]) +def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, + context: schemas.CurrentContext = Depends(OR_context)): + return users.edit(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data.dict(), + user_id_to_update=memberId) + + +@app.get('/metadata/session_search', tags=["metadata"]) +def search_sessions_by_metadata(key: str, value: str, projectId: Optional[int] = None, + context: schemas.CurrentContext = Depends(OR_context)): + if key is None or value is None or len(value) == 0 and len(key) == 0: + return {"errors": ["please provide a 
key&value for search"]} + + if projectId is not None and not projects.is_authorized(project_id=projectId, tenant_id=context.tenant_id): + return {"errors": ["unauthorized project"]} + if len(value) == 0: + return {"errors": ["please provide a value for search"]} + if len(key) == 0: + return {"errors": ["please provide a key for search"]} + return { + "data": sessions.search_by_metadata(tenant_id=context.tenant_id, user_id=context.user_id, m_value=value, + m_key=key, project_id=projectId)} + + +@app.get('/plans', tags=["plan"]) +def get_current_plan(context: schemas.CurrentContext = Depends(OR_context)): + return { + "data": license.get_status(context.tenant_id) + } + + +@public_app.post('/alerts/notifications', tags=["alerts"]) +@public_app.put('/alerts/notifications', tags=["alerts"]) +def send_alerts_notifications(background_tasks: BackgroundTasks, data: schemas.AlertNotificationSchema = Body(...)): + # TODO: validate token + return {"data": alerts.process_notifications(data.notifications, background_tasks=background_tasks)} + + +@public_app.get('/general_stats', tags=["private"], include_in_schema=False) +def get_general_stats(): + return {"data": {"sessions:": sessions.count_all()}} diff --git a/ee/api/routers/crons/core_dynamic_crons.py b/ee/api/routers/crons/core_dynamic_crons.py new file mode 100644 index 000000000..bdde42a15 --- /dev/null +++ b/ee/api/routers/crons/core_dynamic_crons.py @@ -0,0 +1,18 @@ +from chalicelib.core import telemetry, unlock + + +def telemetry_cron() -> None: + telemetry.compute() + + +# @app.schedule(Cron('0/60', '*', '*', '*', '?', '*')) +def unlock_cron() -> None: + print("validating license") + unlock.check() + print(f"valid: {unlock.is_valid()}") + + +cron_jobs = [ + {"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"}, + {"func": unlock_cron, "trigger": "cron", "hour": "*"} +] diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py new file mode 100644 index 000000000..52ffaad8f --- /dev/null +++ 
b/ee/api/routers/ee.py @@ -0,0 +1,60 @@ +from chalicelib.core import roles +from chalicelib.core import unlock +from chalicelib.utils import assist_helper + +unlock.check() + +from or_dependencies import OR_context +from routers.base import get_routers +import schemas +import schemas_ee +from fastapi import Depends, Body + +public_app, app, app_apikey = get_routers() + + +@app.get('/client/roles', tags=["client", "roles"]) +def get_roles(context: schemas.CurrentContext = Depends(OR_context)): + return { + 'data': roles.get_roles(tenant_id=context.tenant_id) + } + + +@app.post('/client/roles', tags=["client", "roles"]) +@app.put('/client/roles', tags=["client", "roles"]) +def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): + data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, **data.dict()) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.post('/client/roles/{roleId}', tags=["client", "roles"]) +@app.put('/client/roles/{roleId}', tags=["client", "roles"]) +def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, changes=data.dict()) + if "errors" in data: + return data + + return { + 'data': data + } + + +@app.delete('/client/roles/{roleId}', tags=["client", "roles"]) +def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_context)): + data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId) + if "errors" in data: + return data + return { + 'data': data + } + + +@app.get('/assist/credentials', tags=["assist"]) +def get_assist_credentials(): + return {"data": assist_helper.get_full_config()} diff --git a/ee/api/chalicelib/blueprints/bp_saml.py b/ee/api/routers/saml.py similarity index 74% rename from 
ee/api/chalicelib/blueprints/bp_saml.py rename to ee/api/routers/saml.py index 814d93a9c..c9f074324 100644 --- a/ee/api/chalicelib/blueprints/bp_saml.py +++ b/ee/api/routers/saml.py @@ -1,35 +1,33 @@ -from chalice import Blueprint +from fastapi import HTTPException +from fastapi import Request, Response -from chalicelib import _overrides from chalicelib.utils import SAML2_helper from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth +from routers.base import get_routers -app = Blueprint(__name__) -_overrides.chalice_app(app) - -from chalicelib.utils.helper import environ +public_app, app, app_apikey = get_routers() +from decouple import config from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request -from chalice import Response from chalicelib.core import users, tenants, roles +from starlette.responses import RedirectResponse +from starlette import status -@app.route('/sso/saml2', methods=['GET'], authorizer=None) -def start_sso(): - app.current_request.path = '' - req = prepare_request(request=app.current_request) +@public_app.get("/sso/saml2", tags=["saml2"]) +async def start_sso(request: Request): + request.path = '' + req = await prepare_request(request=request) auth = init_saml_auth(req) sso_built_url = auth.login() - return Response( - status_code=307, - body='', - headers={'Location': sso_built_url, 'Content-Type': 'text/plain'}) + return RedirectResponse(url=sso_built_url) -@app.route('/sso/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None) -def process_sso_assertion(): - req = prepare_request(request=app.current_request) +# @public_app.post('/sso/saml2/acs', tags=["saml2"], content_types=['application/x-www-form-urlencoded']) +@public_app.post('/sso/saml2/acs', tags=["saml2"]) +async def process_sso_assertion(request: Request): + req = await prepare_request(request=request) session = req["cookie"]["session"] auth = init_saml_auth(req) @@ -94,19 +92,18 @@ def process_sso_assertion(): 
changes={"origin": SAML2_helper.get_saml2_provider(), "internal_id": internal_id}) expiration = auth.get_session_expiration() expiration = expiration if expiration is not None and expiration > 10 * 60 \ - else int(environ.get("sso_exp_delta_seconds", 24 * 60 * 60)) + else int(config("sso_exp_delta_seconds", cast=int, default=24 * 60 * 60)) jwt = users.authenticate_sso(email=email, internal_id=internal_id, exp=expiration) if jwt is None: return {"errors": ["null JWT"]} return Response( - status_code=302, - body='', - headers={'Location': SAML2_helper.get_landing_URL(jwt), 'Content-Type': 'text/plain'}) + status_code=status.HTTP_302_FOUND, + headers={'Location': SAML2_helper.get_landing_URL(jwt)}) -@app.route('/sso/saml2/sls', methods=['GET'], authorizer=None) -def process_sls_assertion(): - req = prepare_request(request=app.current_request) +@public_app.get('/sso/saml2/sls', tags=["saml2"]) +async def process_sls_assertion(request: Request): + req = await prepare_request(request=request) session = req["cookie"]["session"] auth = init_saml_auth(req) request_id = None @@ -134,20 +131,14 @@ def process_sls_assertion(): print("Preprocessed SLS-Request by SP") if url is not None: - return Response( - status_code=307, - body='', - headers={'Location': url, 'Content-Type': 'text/plain'}) + return RedirectResponse(url=url) - return Response( - status_code=307, - body='', - headers={'Location': environ["SITE_URL"], 'Content-Type': 'text/plain'}) + return RedirectResponse(url=config("SITE_URL")) -@app.route('/sso/saml2/metadata', methods=['GET'], authorizer=None) -def saml2_metadata(): - req = prepare_request(request=app.current_request) +@public_app.get('/sso/saml2/metadata', tags=["saml2"]) +async def saml2_metadata(request: Request): + req = await prepare_request(request=request) auth = init_saml_auth(req) settings = auth.get_settings() metadata = settings.get_sp_metadata() @@ -155,10 +146,10 @@ def saml2_metadata(): if len(errors) == 0: return Response( - status_code=200, 
- body=metadata, + status_code=status.HTTP_200_OK, + content=metadata, headers={'Content-Type': 'text/xml'}) else: - return Response( - status_code=500, - body=', '.join(errors)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=', '.join(errors)) diff --git a/ee/api/run-dev.sh b/ee/api/run-dev.sh new file mode 100755 index 000000000..76682286d --- /dev/null +++ b/ee/api/run-dev.sh @@ -0,0 +1,3 @@ +#!/bin/zsh + +uvicorn app:app --reload \ No newline at end of file diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py new file mode 100644 index 000000000..e278f3077 --- /dev/null +++ b/ee/api/schemas_ee.py @@ -0,0 +1,19 @@ +from typing import Optional, List + +from pydantic import BaseModel, Field + +import schemas + + +class RolePayloadSchema(BaseModel): + name: str = Field(...) + description: Optional[str] = Field(None) + permissions: List[str] = Field(...) + + +class CreateMemberSchema(schemas.CreateMemberSchema): + roleId: Optional[int] = Field(None) + + +class EditMemberSchema(schemas.EditMemberSchema): + roleId: int = Field(...) 
diff --git a/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql b/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql new file mode 100644 index 000000000..5f0f4f054 --- /dev/null +++ b/ee/scripts/helm/db/init_dbs/postgresql/1.5.0/1.5.0.sql @@ -0,0 +1,18 @@ +BEGIN; +CREATE TABLE traces +( + user_id integer NULL REFERENCES users (user_id) ON DELETE CASCADE, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint, + auth text NULL, + action text NOT NULL, + method text NOT NULL, + path_format text NOT NULL, + endpoint text NOT NULL, + payload jsonb NULL, + parameters jsonb NULL, + status int NULL +); +CREATE INDEX traces_user_id_idx ON traces (user_id); +CREATE INDEX traces_tenant_id_idx ON traces (tenant_id); +COMMIT; \ No newline at end of file diff --git a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql index 0b7e8fa40..3cf1d6751 100644 --- a/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/helm/db/init_dbs/postgresql/init_schema.sql @@ -910,6 +910,23 @@ $$ CREATE INDEX ON jobs (start_at); + CREATE TABLE traces + ( + user_id integer NULL REFERENCES users (user_id) ON DELETE CASCADE, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + created_at bigint NOT NULL DEFAULT (EXTRACT(EPOCH FROM now() at time zone 'utc') * 1000)::bigint, + auth text NULL, + action text NOT NULL, + method text NOT NULL, + path_format text NOT NULL, + endpoint text NOT NULL, + payload jsonb NULL, + parameters jsonb NULL, + status int NULL + ); + CREATE INDEX traces_user_id_idx ON traces (user_id); + CREATE INDEX traces_tenant_id_idx ON traces (tenant_id); + raise notice 'DB created'; END IF; END;