From a34179365e92d4489aa1424f30b7f7057c4e31fb Mon Sep 17 00:00:00 2001 From: Kraiem Taha Yassine Date: Wed, 6 Sep 2023 17:06:33 +0100 Subject: [PATCH] Api v1.15.0 (#1464) * feat(DB): rearranged queries feat(DB): ready for v1.15.0 * refactor(chalice): upgraded dependencies refactor(crons): upgraded dependencies refactor(alerts): upgraded dependencies * fix(chalice): return error when updating inexistant webhook * feat(chalice): fixed delete webhook response * feat(chalice): limit webhooks name length * feat(chalice): upgraded dependencies feat(alerts): upgraded dependencies feat(crons): upgraded dependencies * fix(chalice): remove urllib3 dependency * feat(chalice): remove FOSS to pydantic v2 * fix(chalice): freeze urllib3 to not have conflicts between boto3 and requests * feat(chalice): refactoring schema in progress * feat(chalice): refactoring schema in progress * feat(chalice): refactoring schema in progress * feat(chalice): refactoring schema in progress feat(chalice): upgraded dependencies * feat(chalice): refactored schema * fix(chalice): pull rebase dev * feat(DB): transfer size support * feat(chalice): support service account * feat(chalice): support service account * fix(chalice): fixed refactored PayloadSchema-name * feat(chalice): path analysis * feat(chalice): support service account 1/2 * feat(DB): timezone support * feat(chalice): upgraded dependencies feat(alerts): upgraded dependencies feat(crons): upgraded dependencies feat(assist): upgraded dependencies feat(sourcemaps): upgraded dependencies * feat(chalice): path analysis schema changes * feat(chalice): path analysis query change * feat(chalice): path analysis query change * feat(chalice): ios replay support * feat(chalice): ios replay support * feat(chalice): path analysis changes * feat(chalice): upgraded dependencies * feat(chalice): simple hide minor paths * feat(chalice): path analysis density * feat(chalice): session's replay ios events * feat(chalice): fixed typo * feat(chalice): support 
project's platform * feat(DB): support project's platform * feat(chalice): path analysis EE in progress * feat(chalice): project's platform API * feat(chalice): fixed create project * feat(chalice): EE path analysis in progress * feat(chalice): EE path analysis refactor(chalice): support specific database name for clickhouse-client * feat(chalice): upgraded dependencies feat(chalice): path analysis specific event type for startPoint feat(chalice): path analysis specific event type for endPoint feat(chalice): path analysis specific event type for exclude * refactoring(chalice): changed IOS click event type --- api/Pipfile | 22 +- api/auth/auth_jwt.py | 22 +- api/chalicelib/core/alerts.py | 4 +- api/chalicelib/core/authorizers.py | 8 +- api/chalicelib/core/click_maps.py | 4 +- api/chalicelib/core/custom_metrics.py | 326 ++- api/chalicelib/core/dashboards.py | 12 +- api/chalicelib/core/errors.py | 56 +- api/chalicelib/core/events.py | 5 +- api/chalicelib/core/events_ios.py | 37 +- api/chalicelib/core/feature_flags.py | 14 +- api/chalicelib/core/funnels.py | 10 +- api/chalicelib/core/heatmaps.py | 9 +- api/chalicelib/core/integration_github.py | 9 +- api/chalicelib/core/integration_jira_cloud.py | 17 +- api/chalicelib/core/log_tool_bugsnag.py | 12 +- api/chalicelib/core/log_tool_cloudwatch.py | 19 +- api/chalicelib/core/log_tool_datadog.py | 11 +- api/chalicelib/core/log_tool_elasticsearch.py | 26 +- api/chalicelib/core/log_tool_newrelic.py | 16 +- api/chalicelib/core/log_tool_rollbar.py | 7 +- api/chalicelib/core/log_tool_sentry.py | 14 +- api/chalicelib/core/log_tool_stackdriver.py | 14 +- api/chalicelib/core/log_tool_sumologic.py | 15 +- api/chalicelib/core/metadata.py | 78 +- api/chalicelib/core/notifications.py | 2 +- api/chalicelib/core/product_analytics.py | 393 ++- api/chalicelib/core/projects.py | 31 +- api/chalicelib/core/saved_search.py | 5 +- api/chalicelib/core/sessions.py | 84 +- api/chalicelib/core/sessions_mobs.py | 40 +- 
api/chalicelib/core/sessions_notes.py | 6 +- api/chalicelib/core/sessions_replay.py | 25 +- api/chalicelib/core/significance.py | 66 +- api/chalicelib/core/signup.py | 2 +- api/chalicelib/core/users.py | 30 +- api/chalicelib/core/webhook.py | 26 +- api/env.default | 5 +- api/requirements-alerts.txt | 21 +- api/requirements.txt | 23 +- api/routers/core.py | 97 +- api/routers/core_dynamic.py | 67 +- api/routers/subs/health.py | 3 + api/routers/subs/insights.py | 8 +- api/routers/subs/metrics.py | 118 +- api/schemas.py | 1457 ----------- api/schemas/__init__.py | 2 + api/schemas/overrides.py | 62 + api/schemas/schemas.py | 1644 ++++++++++++ assist/package.json | 6 +- ee/api/.gitignore | 5 +- ee/api/Pipfile | 28 +- ee/api/app.py | 14 +- ee/api/auth/auth_jwt.py | 33 +- ee/api/chalicelib/core/assist_records.py | 22 +- ee/api/chalicelib/core/authorizers.py | 8 +- ee/api/chalicelib/core/custom_metrics.py | 390 +-- ee/api/chalicelib/core/errors.py | 56 +- ee/api/chalicelib/core/notifications.py | 2 +- ee/api/chalicelib/core/permissions.py | 4 +- ee/api/chalicelib/core/product_analytics.py | 2272 +++++++++-------- ee/api/chalicelib/core/projects.py | 86 +- ee/api/chalicelib/core/roles.py | 9 +- ee/api/chalicelib/core/sessions_devtool.py | 6 +- ee/api/chalicelib/core/sessions_exp.py | 22 +- ee/api/chalicelib/core/sessions_favorite.py | 8 +- ee/api/chalicelib/core/sessions_insights.py | 24 +- ee/api/chalicelib/core/sessions_notes.py | 5 +- ee/api/chalicelib/core/sessions_replay.py | 4 +- ee/api/chalicelib/core/signals.py | 4 +- ee/api/chalicelib/core/significance.py | 66 +- ee/api/chalicelib/core/significance_exp.py | 67 +- ee/api/chalicelib/core/signup.py | 5 +- ee/api/chalicelib/core/traces.py | 12 +- ee/api/chalicelib/core/users.py | 190 +- ee/api/chalicelib/core/webhook.py | 26 +- ee/api/chalicelib/utils/SAML2_helper.py | 4 +- ee/api/chalicelib/utils/ch_client.py | 5 +- ee/api/chalicelib/utils/events_queue.py | 4 +- ee/api/clean-dev.sh | 3 +- ee/api/or_dependencies.py | 15 
+- ee/api/requirements-alerts.txt | 26 +- ee/api/requirements-crons.txt | 27 +- ee/api/requirements.txt | 28 +- ee/api/routers/core_dynamic.py | 101 +- ee/api/routers/ee.py | 48 +- ee/api/routers/subs/insights.py | 5 +- ee/api/routers/subs/metrics.py | 128 +- ee/api/schemas/__init__.py | 3 + ee/api/{ => schemas}/schemas_ee.py | 72 +- ee/assist/package.json | 12 +- .../db/init_dbs/clickhouse/1.15.0/1.15.0.sql | 7 + .../clickhouse/create/init_schema.sql | 3 + .../db/init_dbs/postgresql/1.14.0/1.14.0.sql | 8 + .../db/init_dbs/postgresql/1.15.0/1.15.0.sql | 36 + .../db/init_dbs/postgresql/init_schema.sql | 80 +- .../db/init_dbs/postgresql/1.15.0/1.15.0.sql | 36 + .../db/init_dbs/postgresql/init_schema.sql | 35 +- sourcemap-reader/package.json | 4 +- 99 files changed, 4999 insertions(+), 4049 deletions(-) delete mode 100644 api/schemas.py create mode 100644 api/schemas/__init__.py create mode 100644 api/schemas/overrides.py create mode 100644 api/schemas/schemas.py create mode 100644 ee/api/schemas/__init__.py rename ee/api/{ => schemas}/schemas_ee.py (60%) create mode 100644 ee/scripts/schema/db/init_dbs/clickhouse/1.15.0/1.15.0.sql create mode 100644 ee/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql create mode 100644 scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql diff --git a/api/Pipfile b/api/Pipfile index 174b668fa..0807491bf 100644 --- a/api/Pipfile +++ b/api/Pipfile @@ -5,18 +5,18 @@ name = "pypi" [packages] requests = "==2.31.0" -urllib3 = "==1.26.16" -boto3 = "==1.26.148" -pyjwt = "==2.7.0" -psycopg2-binary = "==2.9.6" -elasticsearch = "==8.8.0" -jira = "==3.5.1" -fastapi = "==0.96.0" -uvicorn = {version = "==0.22.0", extras = ["standard"]} +boto3 = "==1.28.40" +pyjwt = "==2.8.0" +psycopg2-binary = "==2.9.7" +elasticsearch = "==8.9.0" +jira = "==3.5.2" +fastapi = "==0.103.1" python-decouple = "==3.8" -pydantic = {version = "==1.10.8", extras = ["email"]} -apscheduler = "==3.10.1" -redis = "==4.5.5" +apscheduler = "==3.10.4" +redis = "==5.0.0" 
+urllib3 = "==1.26.16" +uvicorn = {version = "==0.23.2", extras = ["standard"]} +pydantic = {version = "==2.3.0", extras = ["email"]} [dev-packages] diff --git a/api/auth/auth_jwt.py b/api/auth/auth_jwt.py index e8824b6b9..89d8f5bda 100644 --- a/api/auth/auth_jwt.py +++ b/api/auth/auth_jwt.py @@ -18,7 +18,7 @@ class JWTAuth(HTTPBearer): if credentials: if not credentials.scheme == "Bearer": raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") - jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials) auth_exists = jwt_payload is not None \ and users.auth_exists(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1), @@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer): if jwt_payload is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ or not auth_exists: - print("JWTAuth: Token issue") if jwt_payload is not None: print(jwt_payload) - print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}") - if jwt_payload is None: - print("JWTAuth: jwt_payload is None") - print(credentials.scheme + " " + credentials.credentials) - if jwt_payload is not None and jwt_payload.get("iat") is None: - print("JWTAuth: iat is None") - if jwt_payload is not None and jwt_payload.get("aud") is None: - print("JWTAuth: aud is None") - if jwt_payload is not None and not auth_exists: + if jwt_payload.get("iat") is None: + print("JWTAuth: iat is None") + if jwt_payload.get("aud") is None: + print("JWTAuth: aud is None") + if not auth_exists: print("JWTAuth: not users.auth_exists") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") @@ -47,10 +42,9 @@ class JWTAuth(HTTPBearer): print("JWTAuth: User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User 
not found.") jwt_payload["authorizer_identity"] = "jwt" - print(jwt_payload) request.state.authorizer_identity = "jwt" - request.state.currentContext = schemas.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), - user_id=jwt_payload.get("userId", -1), + request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1), + userId=jwt_payload.get("userId", -1), email=user["email"]) return request.state.currentContext diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py index fae4e4883..e801ca789 100644 --- a/api/chalicelib/core/alerts.py +++ b/api/chalicelib/core/alerts.py @@ -55,7 +55,7 @@ def __process_circular(alert): def create(project_id, data: schemas.AlertSchema): - data = data.dict() + data = data.model_dump() data["query"] = json.dumps(data["query"]) data["options"] = json.dumps(data["options"]) @@ -72,7 +72,7 @@ def create(project_id, data: schemas.AlertSchema): def update(id, data: schemas.AlertSchema): - data = data.dict() + data = data.model_dump() data["query"] = json.dumps(data["query"]) data["options"] = json.dumps(data["options"]) diff --git a/api/chalicelib/core/authorizers.py b/api/chalicelib/core/authorizers.py index 46ec9e25a..55b16d94f 100644 --- a/api/chalicelib/core/authorizers.py +++ b/api/chalicelib/core/authorizers.py @@ -6,13 +6,12 @@ from chalicelib.core import tenants from chalicelib.core import users -def jwt_authorizer(token): - token = token.split(" ") - if len(token) != 2 or token[0].lower() != "bearer": +def jwt_authorizer(scheme: str, token: str): + if scheme.lower() != "bearer": return None try: payload = jwt.decode( - token[1], + token, config("jwt_secret"), algorithms=config("jwt_algorithm"), audience=[f"front:{helper.get_stage_name()}"] @@ -22,6 +21,7 @@ def jwt_authorizer(token): return None except BaseException as e: print("! 
JWT Base Exception") + print(e) return None return payload diff --git a/api/chalicelib/core/click_maps.py b/api/chalicelib/core/click_maps.py index 2383aeb1c..84f27ef8e 100644 --- a/api/chalicelib/core/click_maps.py +++ b/api/chalicelib/core/click_maps.py @@ -27,7 +27,7 @@ COALESCE((SELECT TRUE AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """ -def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True): +def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id, include_mobs: bool = True): no_platform = True for f in data.filters: if f.type == schemas.FilterType.platform: @@ -62,7 +62,7 @@ def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, u print("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------") print(main_query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(data.json()) + print(data.model_dump_json()) print("--------------------") raise err diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 5ecb4741d..f751c5a9b 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -13,25 +13,24 @@ from chalicelib.utils.storage import StorageClient PIE_CHART_GROUP = 5 +# TODO: refactor this to split +# timeseries / +# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs def __try_live(project_id, data: schemas.CardSchema): results = [] for i, s in enumerate(data.series): - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value)) if data.view_type == schemas.MetricTimeseriesViewType.progress: r = {"count": results[-1]} - diff = s.filter.endDate - 
s.filter.startDate - s.filter.endDate = s.filter.startDate - s.filter.startDate = s.filter.endDate - diff + diff = s.filter.endTimestamp - s.filter.startTimestamp + s.filter.endTimestamp = s.filter.startTimestamp + s.filter.startTimestamp = s.filter.endTimestamp - diff r["previousCount"] = sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value) r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"]) - # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \ - # if r["previousCount"] > 0 else 0 r["seriesName"] = s.name if s.name else i + 1 r["seriesId"] = s.series_id if s.series_id else None results[-1] = r @@ -50,14 +49,12 @@ def __is_funnel_chart(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.funnel -def __get_funnel_chart(project_id, data: schemas.CardSchema): +def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None): if len(data.series) == 0: return { "stages": [], "totalDropDueToIssues": 0 } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) @@ -72,10 +69,6 @@ def __get_errors_list(project_id, user_id, data: schemas.CardSchema): "total": 0, "errors": [] } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp - data.series[0].filter.page = data.page - data.series[0].filter.limit = data.limit return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) @@ -91,10 +84,6 @@ def __get_sessions_list(project_id, user_id, data: schemas.CardSchema): "total": 0, "sessions": [] } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp - 
data.series[0].filter.page = data.page - data.series[0].filter.limit = data.limit return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id) @@ -106,48 +95,33 @@ def __is_click_map(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.click_map -def __get_click_map_chart(project_id, user_id, data: schemas.CardSchema, include_mobs: bool = True): +def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True): if len(data.series) == 0: return None - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp return click_maps.search_short_session(project_id=project_id, user_id=user_id, - data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()), + data=schemas.ClickMapSessionsSearch( + **data.series[0].filter.model_dump()), include_mobs=include_mobs) -def __get_path_analysis_chart(project_id, data: schemas.CardSchema): +def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis): if len(data.series) == 0: - data.series.append(schemas.CardSeriesSchema()) + data.series.append( + schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp)) elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema): data.series[0].filter = schemas.PathAnalysisSchema() - data.series[0].filter.startTimestamp = data.startTimestamp - data.series[0].filter.endTimestamp = data.endTimestamp - return product_analytics.path_analysis(project_id=project_id, - data=schemas.PathAnalysisSchema(**data.series[0].filter.dict())) + + return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density, + selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess) def __is_path_analysis(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.pathAnalysis -def merged_live(project_id, data: 
schemas.CardSchema, user_id=None): - if data.is_template: - return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict()) - elif __is_funnel_chart(data): - return __get_funnel_chart(project_id=project_id, data=data) - elif __is_errors_list(data): - return __get_errors_list(project_id=project_id, user_id=user_id, data=data) - elif __is_sessions_list(data): - return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) - elif __is_click_map(data): - return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data) - elif __is_path_analysis(data): - return __get_path_analysis_chart(project_id=project_id, data=data) - elif len(data.series) == 0: - return [] +def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None): series_charts = __try_live(project_id=project_id, data=data) - if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: + if data.view_type == schemas.MetricTimeseriesViewType.progress: return series_charts results = [{}] * len(series_charts[0]) for i in range(len(results)): @@ -157,26 +131,131 @@ def merged_live(project_id, data: schemas.CardSchema, user_id=None): return results +def empty(**args): + raise Exception("not supported") + + +def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None): + series_charts = __try_live(project_id=project_id, data=data) + return series_charts + + +def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id): + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) + + +def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int): + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + + +def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def 
__get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int): + supported = { + schemas.MetricOfTable.sessions: __get_table_of_sessions, + schemas.MetricOfTable.errors: __get_table_of_errors, + schemas.MetricOfTable.user_id: __get_table_of_user_ids, + schemas.MetricOfTable.issues: __get_table_of_issues, + schemas.MetricOfTable.user_browser: __get_table_of_browsers, + schemas.MetricOfTable.user_device: __get_table_of_devises, + schemas.MetricOfTable.user_country: __get_table_of_countries, + schemas.MetricOfTable.visited_url: __get_table_of_urls, + } + return supported.get(data.metric_of, empty)(project_id=project_id, data=data, user_id=user_id) + + +def get_chart(project_id: int, data: schemas.CardSchema, user_id: int): + if data.is_template: + return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump()) + + supported = { + schemas.MetricType.timeseries: __get_timeseries_chart, + schemas.MetricType.table: __get_table_chart, + schemas.MetricType.click_map: __get_click_map_chart, + schemas.MetricType.funnel: __get_funnel_chart, + schemas.MetricType.insights: empty, + schemas.MetricType.pathAnalysis: __get_path_analysis_chart + } + return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id) + + +def merged_live(project_id, data: schemas.CardSchema, user_id=None): + return 
get_chart(project_id=project_id, data=data, user_id=user_id) + print("---1") + if data.is_template: + print("---2") + return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump()) + elif __is_funnel_chart(data): + print("---3") + return __get_funnel_chart(project_id=project_id, data=data) + elif __is_errors_list(data): + print("---4") + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + elif __is_sessions_list(data): + print("---5") + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) + elif __is_click_map(data): + print("---6") + return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data) + elif __is_path_analysis(data): + print("---7") + return __get_path_analysis_chart(project_id=project_id, data=data) + elif len(data.series) == 0: + print("---8") + return [] + series_charts = __try_live(project_id=project_id, data=data) + print("---9") + if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: + print("---10") + return series_charts + results = [{}] * len(series_charts[0]) + print("---11") + for i in range(len(results)): + for j, series_chart in enumerate(series_charts): + results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], + data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]} + return results + + def __merge_metric_with_data(metric: schemas.CardSchema, - data: schemas.CardChartSchema) -> schemas.CardSchema: + data: schemas.CardSessionsSchema) -> schemas.CardSchema: if data.series is not None and len(data.series) > 0: metric.series = data.series - metric: schemas.CardSchema = schemas.CardSchema( - **{**data.dict(by_alias=True), **metric.dict(by_alias=True)}) + # TODO: try to refactor this + metric: schemas.CardSchema = schemas.CardSchema(**{**data.model_dump(by_alias=True), + **metric.model_dump(by_alias=True)}) if len(data.filters) > 0 or 
len(data.events) > 0: for s in metric.series: if len(data.filters) > 0: s.filter.filters += data.filters if len(data.events) > 0: s.filter.events += data.events - metric.limit = data.limit - metric.page = data.page - metric.startTimestamp = data.startTimestamp - metric.endTimestamp = data.endTimestamp + # metric.limit = data.limit + # metric.page = data.page + # metric.startTimestamp = data.startTimestamp + # metric.endTimestamp = data.endTimestamp return metric -def make_chart(project_id, user_id, data: schemas.CardChartSchema, metric: schemas.CardSchema): +def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: schemas.CardSchema): if metric is None: return None metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) @@ -198,10 +277,10 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem # if __is_click_map(metric) and raw_metric.get("data") is not None: # is_click_map = True for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page + # s.filter.startTimestamp = data.startTimestamp + # s.filter.endTimestamp = data.endTimestamp + # s.filter.limit = data.limit + # s.filter.page = data.page # if is_click_map: # results.append( # {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]}) @@ -221,10 +300,6 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessions if metric is None: return None for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page return {"seriesId": s.series_id, "seriesName": s.name, **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)} @@ -238,23 +313,15 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc if metric is None: return None for s in 
metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page return {"seriesId": s.series_id, "seriesName": s.name, **errors.search(data=s.filter, project_id=project_id, user_id=user_id)} def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema): results = [] - if data.series is None: + if len(data.series) == 0: return results for s in data.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page if len(data.filters) > 0: s.filter.filters += data.filters if len(data.events) > 0: @@ -265,7 +332,7 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema): return results -def create(project_id, user_id, data: schemas.CardSchema, dashboard=False): +def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False): with pg_client.PostgresClient() as cur: session_data = None if __is_click_map(data): @@ -275,13 +342,13 @@ def create(project_id, user_id, data: schemas.CardSchema, dashboard=False): session_data = json.dumps(session_data) _data = {"session_data": session_data} for i, s in enumerate(data.series): - for k in s.dict().keys(): + for k in s.model_dump().keys(): _data[f"{k}_{i}"] = s.__getattribute__(k) _data[f"index_{i}"] = i _data[f"filter_{i}"] = s.filter.json() series_len = len(data.series) - params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} - params["default_config"] = json.dumps(data.default_config.dict()) + params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data} + params["default_config"] = json.dumps(data.default_config.model_dump()) query = """INSERT INTO metrics (project_id, user_id, name, is_public, view_type, metric_type, metric_of, metric_value, metric_format, default_config, thumbnail, data) @@ -307,7 +374,7 @@ def create(project_id, user_id, data: 
schemas.CardSchema, dashboard=False): return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)} -def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema): +def update_card(metric_id, user_id, project_id, data: schemas.CardSchema): metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if metric is None: return None @@ -320,7 +387,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema): "user_id": user_id, "project_id": project_id, "view_type": data.view_type, "metric_type": data.metric_type, "metric_of": data.metric_of, "metric_value": data.metric_value, "metric_format": data.metric_format, - "config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail} + "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail} for i, s in enumerate(data.series): prefix = "u_" if s.index is None: @@ -331,7 +398,7 @@ def update(metric_id, user_id, project_id, data: schemas.UpdateCardSchema): else: u_series.append({"i": i, "s": s}) u_series_ids.append(s.series_id) - ns = s.dict() + ns = s.model_dump() for k in ns.keys(): if k == "filter": ns[k] = json.dumps(ns[k]) @@ -453,7 +520,7 @@ def get_all(project_id, user_id): return result -def delete(project_id, metric_id, user_id): +def delete_card(project_id, metric_id, user_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify("""\ @@ -562,8 +629,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, if metric is None: return None for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp + s.filter.startTimestamp = data.startTimestamp + s.filter.endTimestamp = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) @@ -589,13 +656,15 @@ def 
get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, "issue": issue} -def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema): +def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, include_data=True) if raw_metric is None: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found") + raw_metric["startTimestamp"] = data.startTimestamp + raw_metric["endTimestamp"] = data.endTimestamp metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) if metric.is_template: - return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict()) + return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump()) elif __is_click_map(metric): if raw_metric["data"]: keys = sessions_mobs. \ @@ -615,53 +684,52 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric) -PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions, - schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, - schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time, - schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time, - schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start, - schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel, - schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages, - schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration, - schemas.MetricOfWebVitals.avg_pages_dom_buildtime: 
metrics.get_pages_dom_build_time, - schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time, - schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time, - schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint, - schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded, - schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit, - schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive, - schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests, - schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render, - schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption, - schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu, - schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps, - schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors, - schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx, - schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx, - schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains, - schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors, - schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type, - schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party, - schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location, - schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains, - schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser, - schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render, - schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages, - schemas.MetricOfPerformance.memory_consumption: 
metrics.get_memory_consumption, - schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu, - schemas.MetricOfPerformance.fps: metrics.get_avg_fps, - schemas.MetricOfPerformance.crashes: metrics.get_crashes, - schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete, - schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time, - schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time, - schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution, - schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend, - schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources, - schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time, - schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, - schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } - - def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict): - return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data) + supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions, + schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, + schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time, + schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time, + schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start, + schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel, + schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages, 
+ schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration, + schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time, + schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time, + schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint, + schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded, + schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit, + schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive, + schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests, + schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render, + schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption, + schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu, + schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps, + schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors, + schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx, + schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx, + schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains, + schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors, + schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type, + schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party, + schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location, + schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains, + schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser, + schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render, + 
schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages, + schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption, + schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu, + schemas.MetricOfPerformance.fps: metrics.get_avg_fps, + schemas.MetricOfPerformance.crashes: metrics.get_crashes, + schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete, + schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time, + schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution, + schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend, + schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources, + schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time, + schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, + schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } + + return supported.get(key, lambda *args: None)(project_id=project_id, **data) diff --git a/api/chalicelib/core/dashboards.py b/api/chalicelib/core/dashboards.py index 4161846a4..ed4ebcb6c 100644 --- a/api/chalicelib/core/dashboards.py +++ b/api/chalicelib/core/dashboards.py @@ -12,7 +12,7 @@ def create_dashboard(project_id, user_id, data: schemas.CreateDashboardSchema): pg_query = f"""INSERT INTO dashboards(project_id, user_id, name, is_public, is_pinned, description) VALUES(%(projectId)s, %(userId)s, %(name)s, %(is_public)s, %(is_pinned)s, %(description)s) RETURNING *""" - params = {"userId": user_id, "projectId": project_id, **data.dict()} + params = {"userId": user_id, "projectId": project_id, **data.model_dump()} if data.metrics is not None and len(data.metrics) > 0: pg_query = f"""WITH dash AS 
({pg_query}) INSERT INTO dashboard_widgets(dashboard_id, metric_id, user_id, config) @@ -109,7 +109,7 @@ def update_dashboard(project_id, user_id, dashboard_id, data: schemas.EditDashbo pg_query = """SELECT COALESCE(COUNT(*),0) AS count FROM dashboard_widgets WHERE dashboard_id = %(dashboard_id)s;""" - params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()} + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()} cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() offset = row["count"] @@ -178,7 +178,7 @@ def add_widget(project_id, user_id, dashboard_id, data: schemas.AddWidgetToDashb AND dashboard_id = %(dashboard_id)s AND (dashboards.user_id = %(userId)s OR is_public)) RETURNING *;""" - params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.dict()} + params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, **data.model_dump()} params["config"] = json.dumps(data.config) cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() @@ -192,7 +192,7 @@ def update_widget(project_id, user_id, dashboard_id, widget_id, data: schemas.Up WHERE dashboard_id=%(dashboard_id)s AND widget_id=%(widget_id)s RETURNING *;""" params = {"userId": user_id, "projectId": project_id, "dashboard_id": dashboard_id, - "widget_id": widget_id, **data.dict()} + "widget_id": widget_id, **data.model_dump()} params["config"] = json.dumps(data.config) cur.execute(cur.mogrify(pg_query, params)) row = cur.fetchone() @@ -224,7 +224,7 @@ def pin_dashboard(project_id, user_id, dashboard_id): def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.CardSchema): - metric_id = custom_metrics.create(project_id=project_id, user_id=user_id, data=data, dashboard=True) + metric_id = custom_metrics.create_card(project_id=project_id, user_id=user_id, data=data, dashboard=True) return add_widget(project_id=project_id, 
user_id=user_id, dashboard_id=dashboard_id, data=schemas.AddWidgetToDashboardPayloadSchema(metricId=metric_id)) @@ -234,7 +234,7 @@ def create_metric_add_widget(project_id, user_id, dashboard_id, data: schemas.Ca # return None # metric = schemas.CustomMetricAndTemplate = schemas.CustomMetricAndTemplate(**raw_metric) # if metric.is_template: -# return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.dict()) +# return get_predefined_metric(key=metric.predefined_key, project_id=project_id, data=data.model_dump()) # else: # return custom_metrics.make_chart(project_id=project_id, user_id=user_id, metric_id=raw_metric["metricId"], # data=data, metric=raw_metric) diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py index 3ff0cf584..28b4a8aaa 100644 --- a/api/chalicelib/core/errors.py +++ b/api/chalicelib/core/errors.py @@ -454,10 +454,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None - if data.startDate is None: - data.startDate = TimeUTC.now(-30) - if data.endDate is None: - data.endDate = TimeUTC.now(1) + if data.startTimestamp is None: + data.startTimestamp = TimeUTC.now(-30) + if data.endTimestamp is None: + data.endTimestamp = TimeUTC.now(1) if len(data.events) > 0 or len(data.filters) > 0: print("-- searching for sessions before errors") statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, @@ -466,18 +466,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): return empty_response error_ids = [e["errorId"] for e in statuses] with pg_client.PostgresClient() as cur: - step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1) + step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1) sort = __get_sort_key('datetime') if data.sort is not None: sort = 
__get_sort_key(data.sort) - order = schemas.SortOrderType.desc.value + order = schemas.SortOrderType.desc if data.order is not None: - order = data.order.value + order = data.order extra_join = "" params = { - "startDate": data.startDate, - "endDate": data.endDate, + "startDate": data.startTimestamp, + "endDate": data.endTimestamp, "project_id": project_id, "userId": user_id, "step_size": step_size} @@ -709,41 +709,3 @@ def change_state(project_id, user_id, error_id, action): for e in errors: e["status"] = row["status"] return {"data": errors} - - -MAX_RANK = 2 - - -def __status_rank(status): - return { - 'unresolved': MAX_RANK - 2, - 'ignored': MAX_RANK - 1, - 'resolved': MAX_RANK - }.get(status) - - -def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s) - SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed - FROM (SELECT root_error.error_id - FROM events.errors - INNER JOIN public.errors AS root_error USING (error_id) - LEFT JOIN user_viewed USING (error_id) - WHERE project_id = %(project_id)s - AND timestamp >= %(startTimestamp)s - AND timestamp <= %(endTimestamp)s - AND source = 'js_exception' - AND root_error.status = 'unresolved' - AND user_viewed.error_id ISNULL - LIMIT 1 - ) AS timed_errors;""", - {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp}) - cur.execute(query=query) - row = cur.fetchone() - - return { - "data": helper.dict_to_camel_case(row) - } diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py index 17f601180..9439cff7c 100644 --- a/api/chalicelib/core/events.py +++ b/api/chalicelib/core/events.py @@ -110,11 +110,12 @@ class EventType: column=None) # column=None because errors are searched by name or message METADATA = 
Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None) # IOS - CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.clicks", column="label") + CLICK_IOS = Event(ui_type=schemas.EventType.click_ios, table="events_ios.taps", column="label") INPUT_IOS = Event(ui_type=schemas.EventType.input_ios, table="events_ios.inputs", column="label") VIEW_IOS = Event(ui_type=schemas.EventType.view_ios, table="events_ios.views", column="name") + SWIPE_IOS = Event(ui_type=schemas.EventType.swipe_ios, table="events_ios.swipes", column="label") CUSTOM_IOS = Event(ui_type=schemas.EventType.custom_ios, table="events_common.customs", column="name") - REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="url") + REQUEST_IOS = Event(ui_type=schemas.EventType.request_ios, table="events_common.requests", column="path") ERROR_IOS = Event(ui_type=schemas.EventType.error_ios, table="events_ios.crashes", column=None) # column=None because errors are searched by name or message diff --git a/api/chalicelib/core/events_ios.py b/api/chalicelib/core/events_ios.py index c5205ba15..0811f52ff 100644 --- a/api/chalicelib/core/events_ios.py +++ b/api/chalicelib/core/events_ios.py @@ -2,20 +2,8 @@ from chalicelib.utils import pg_client, helper from chalicelib.core import events -def get_customs_by_sessionId(session_id, project_id): - with pg_client.PostgresClient() as cur: - cur.execute(cur.mogrify(f"""\ - SELECT - c.*, - '{events.EventType.CUSTOM_IOS.ui_type}' AS type - FROM {events.EventType.CUSTOM_IOS.table} AS c - WHERE - c.session_id = %(session_id)s - ORDER BY c.timestamp;""", - {"project_id": project_id, "session_id": session_id}) - ) - rows = cur.fetchall() - return helper.dict_to_camel_case(rows) +def get_customs_by_session_id(session_id, project_id): + return events.get_customs_by_session_id(session_id=session_id, project_id=project_id) def get_by_sessionId(session_id, project_id): @@ -23,8 +11,8 @@ def 
get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT c.*, - '{events.EventType.CLICK_IOS.ui_type}' AS type - FROM {events.EventType.CLICK_IOS.table} AS c + 'TAP' AS type + FROM events_ios.taps AS c WHERE c.session_id = %(session_id)s ORDER BY c.timestamp;""", @@ -35,8 +23,8 @@ def get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT i.*, - '{events.EventType.INPUT_IOS.ui_type}' AS type - FROM {events.EventType.INPUT_IOS.table} AS i + 'INPUT' AS type + FROM events_ios.inputs AS i WHERE i.session_id = %(session_id)s ORDER BY i.timestamp;""", @@ -46,12 +34,21 @@ def get_by_sessionId(session_id, project_id): cur.execute(cur.mogrify(f""" SELECT v.*, - '{events.EventType.VIEW_IOS.ui_type}' AS type - FROM {events.EventType.VIEW_IOS.table} AS v + 'VIEW' AS type + FROM events_ios.views AS v WHERE v.session_id = %(session_id)s ORDER BY v.timestamp;""", {"project_id": project_id, "session_id": session_id})) rows += cur.fetchall() + cur.execute(cur.mogrify(f""" + SELECT + s.*, + 'SWIPE' AS type + FROM events_ios.swipes AS s + WHERE + s.session_id = %(session_id)s + ORDER BY s.timestamp;""", {"project_id": project_id, "session_id": session_id})) + rows += cur.fetchall() rows = helper.list_to_camel_case(rows) rows = sorted(rows, key=lambda k: k["timestamp"]) return rows diff --git a/api/chalicelib/core/feature_flags.py b/api/chalicelib/core/feature_flags.py index 45ace2964..63755e9a7 100644 --- a/api/chalicelib/core/feature_flags.py +++ b/api/chalicelib/core/feature_flags.py @@ -63,7 +63,7 @@ def search_feature_flags(project_id: int, user_id: int, data: schemas.SearchFlag SELECT COUNT(1) OVER () AS count, {", ".join(feature_flag_columns)} FROM feature_flags WHERE {" AND ".join(constraints)} - ORDER BY updated_at {data.order.value} + ORDER BY updated_at {data.order} LIMIT %(limit)s OFFSET %(offset)s; """ @@ -206,7 +206,7 @@ def prepare_params_to_create_flag(feature_flag_data, project_id, user_id): params = { "project_id": 
project_id, "created_by": user_id, - **feature_flag_data.dict(), + **feature_flag_data.model_dump(), **conditions_data, **variants_data, "payload": json.dumps(feature_flag_data.payload) @@ -218,7 +218,7 @@ def prepare_params_to_create_flag(feature_flag_data, project_id, user_id): def prepare_variants_values(feature_flag_data): variants_data = {} for i, v in enumerate(feature_flag_data.variants): - for k in v.dict().keys(): + for k in v.model_dump().keys(): variants_data[f"v_{k}_{i}"] = v.__getattribute__(k) variants_data[f"v_value_{i}"] = v.value variants_data[f"v_description_{i}"] = v.description @@ -230,11 +230,11 @@ def prepare_variants_values(feature_flag_data): def prepare_conditions_values(feature_flag_data): conditions_data = {} for i, s in enumerate(feature_flag_data.conditions): - for k in s.dict().keys(): + for k in s.model_dump().keys(): conditions_data[f"{k}_{i}"] = s.__getattribute__(k) conditions_data[f"name_{i}"] = s.name conditions_data[f"rollout_percentage_{i}"] = s.rollout_percentage - conditions_data[f"filters_{i}"] = json.dumps([filter_.dict() for filter_ in s.filters]) + conditions_data[f"filters_{i}"] = json.dumps([filter_.model_dump() for filter_ in s.filters]) return conditions_data @@ -299,7 +299,7 @@ def create_conditions(feature_flag_id: int, conditions: List[schemas.FeatureFlag with pg_client.PostgresClient() as cur: params = [ - (feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.dict() for filter_ in c.filters])) + (feature_flag_id, c.name, c.rollout_percentage, json.dumps([filter_.model_dump() for filter_ in c.filters])) for c in conditions] query = cur.mogrify(sql, params) cur.execute(query) @@ -327,10 +327,10 @@ def update_feature_flag(project_id: int, feature_flag_id: int, ) params = { - **feature_flag.dict(), "updated_by": user_id, "feature_flag_id": feature_flag_id, "project_id": project_id, + **feature_flag.model_dump(), "payload": json.dumps(feature_flag.payload), } diff --git a/api/chalicelib/core/funnels.py 
b/api/chalicelib/core/funnels.py index c9f51fcc9..ba79abc70 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -6,7 +6,7 @@ from chalicelib.utils import helper from chalicelib.utils import sql_helper as sh -def filter_stages(stages: List[schemas._SessionSearchEventSchema]): +def filter_stages(stages: List[schemas.SessionSearchEventSchema2]): ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input, schemas.EventType.location, schemas.EventType.custom, schemas.EventType.click_ios, schemas.EventType.input_ios, @@ -15,10 +15,10 @@ def filter_stages(stages: List[schemas._SessionSearchEventSchema]): def __parse_events(f_events: List[dict]): - return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events] + return [schemas.SessionSearchEventSchema2.parse_obj(e) for e in f_events] -def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): +def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]): if f_events is None: return events = [] @@ -41,7 +41,7 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte data.events = __fix_stages(data.events) if len(data.events) == 0: return {"stages": [], "totalDropDueToIssues": 0} - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) + insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data, project_id=project_id) insights = helper.list_to_camel_case(insights) if len(insights) > 0: # TODO: check if this correct @@ -64,5 +64,5 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem return { "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, + significance.get_issues_list(filter_d=data, project_id=project_id, first_stage=1, last_stage=len(data.events)))} diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py index 
528318cde..436fbcce1 100644 --- a/api/chalicelib/core/heatmaps.py +++ b/api/chalicelib/core/heatmaps.py @@ -4,7 +4,7 @@ from chalicelib.utils import helper, pg_client def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema): - args = {"startDate": data.startDate, "endDate": data.endDate, + args = {"startDate": data.startTimestamp, "endDate": data.endTimestamp, "project_id": project_id, "url": data.url} constraints = ["sessions.project_id = %(project_id)s", "(url = %(url)s OR path= %(url)s)", @@ -43,13 +43,6 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema): f.value, value_key=f_k)) constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s", f.value, value_key=f_k)) - if len(f.filters) > 0: - for j, sf in enumerate(f.filters): - f_k = f"issue_svalue{i}{j}" - args = {**args, **sh.multi_values(sf.value, value_key=f_k)} - if sf.type == schemas.IssueFilterType._selector and len(sf.value) > 0: - constraints.append(sh.multi_conditions(f"clicks.selector = %({f_k})s", - sf.value, value_key=f_k)) if data.click_rage and not has_click_rage_filter: constraints.append("""(issues.session_id IS NULL diff --git a/api/chalicelib/core/integration_github.py b/api/chalicelib/core/integration_github.py index 0be412122..41cd292f6 100644 --- a/api/chalicelib/core/integration_github.py +++ b/api/chalicelib/core/integration_github.py @@ -72,16 +72,15 @@ class GitHubIntegration(integration_base.BaseIntegration): ) return {"state": "success"} - def add_edit(self, data): + def add_edit(self, data: schemas.IssueTrackingGithubSchema): s = self.get() if s is not None: return self.update( changes={ - "token": data["token"] \ - if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \ - else s["token"] + "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ + else s.token }, obfuscate=True ) else: - return self.add(token=data["token"], obfuscate=True) + return self.add(token=data.token, obfuscate=True) diff --git 
a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py index 03d75f1e4..c15e8e4c9 100644 --- a/api/chalicelib/core/integration_jira_cloud.py +++ b/api/chalicelib/core/integration_jira_cloud.py @@ -113,21 +113,20 @@ class JIRAIntegration(integration_base.BaseIntegration): ) return {"state": "success"} - def add_edit(self, data): + def add_edit(self, data: schemas.IssueTrackingJiraSchema): if self.integration is not None: return self.update( changes={ - "username": data["username"], - "token": data["token"] \ - if data.get("token") and len(data["token"]) > 0 and data["token"].find("***") == -1 \ - else self.integration["token"], - "url": data["url"] + "username": data.username, + "token": data.token if len(data.token) > 0 and data.token.find("***") == -1 \ + else self.integration.token, + "url": data.url }, obfuscate=True ) else: return self.add( - username=data["username"], - token=data["token"], - url=data["url"] + username=data.username, + token=data.token, + url=data.url ) diff --git a/api/chalicelib/core/log_tool_bugsnag.py b/api/chalicelib/core/log_tool_bugsnag.py index 8354142ab..9f1471421 100644 --- a/api/chalicelib/core/log_tool_bugsnag.py +++ b/api/chalicelib/core/log_tool_bugsnag.py @@ -1,6 +1,8 @@ from chalicelib.core import log_tools import requests +from schemas import schemas + IN_TY = "bugsnag" @@ -60,14 +62,14 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data:schemas.IntegrationBugsnagSchema ): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"authorizationToken": data["authorizationToken"], - "bugsnagProjectId": data["bugsnagProjectId"]}) + changes={"authorizationToken": data.authorization_token, + "bugsnagProjectId": data.bugsnag_project_id}) else: return add(tenant_id=tenant_id, project_id=project_id, - 
authorization_token=data["authorizationToken"], - bugsnag_project_id=data["bugsnagProjectId"]) + authorization_token=data.authorization_token, + bugsnag_project_id=data.bugsnag_project_id) diff --git a/api/chalicelib/core/log_tool_cloudwatch.py b/api/chalicelib/core/log_tool_cloudwatch.py index a42de4943..5600d2fea 100644 --- a/api/chalicelib/core/log_tool_cloudwatch.py +++ b/api/chalicelib/core/log_tool_cloudwatch.py @@ -1,5 +1,6 @@ import boto3 from chalicelib.core import log_tools +from schemas import schemas IN_TY = "cloudwatch" @@ -102,18 +103,18 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationCloudwatchSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"awsAccessKeyId": data["awsAccessKeyId"], - "awsSecretAccessKey": data["awsSecretAccessKey"], - "logGroupName": data["logGroupName"], - "region": data["region"]}) + changes={"awsAccessKeyId": data.aws_access_key_id, + "awsSecretAccessKey": data.aws_secret_access_key, + "logGroupName": data.log_group_name, + "region": data.region}) else: return add(tenant_id=tenant_id, project_id=project_id, - aws_access_key_id=data["awsAccessKeyId"], - aws_secret_access_key=data["awsSecretAccessKey"], - log_group_name=data["logGroupName"], - region=data["region"]) + aws_access_key_id=data.aws_access_key_id, + aws_secret_access_key=data.aws_secret_access_key, + log_group_name=data.log_group_name, + region=data.region) diff --git a/api/chalicelib/core/log_tool_datadog.py b/api/chalicelib/core/log_tool_datadog.py index 68735f833..08b089b60 100644 --- a/api/chalicelib/core/log_tool_datadog.py +++ b/api/chalicelib/core/log_tool_datadog.py @@ -1,4 +1,5 @@ from chalicelib.core import log_tools +from schemas import schemas IN_TY = "datadog" @@ -30,14 +31,14 @@ def delete(tenant_id, project_id): return 
log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationDatadogSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"apiKey": data["apiKey"], - "applicationKey": data["applicationKey"]}) + changes={"apiKey": data.api_key, + "applicationKey": data.application_key}) else: return add(tenant_id=tenant_id, project_id=project_id, - api_key=data["apiKey"], - application_key=data["applicationKey"]) + api_key=data.api_key, + application_key=data.application_key) diff --git a/api/chalicelib/core/log_tool_elasticsearch.py b/api/chalicelib/core/log_tool_elasticsearch.py index 7b00074c6..6afc120b1 100644 --- a/api/chalicelib/core/log_tool_elasticsearch.py +++ b/api/chalicelib/core/log_tool_elasticsearch.py @@ -1,8 +1,9 @@ -# from elasticsearch import Elasticsearch, RequestsHttpConnection from elasticsearch import Elasticsearch from chalicelib.core import log_tools import logging +from schemas import schemas + logging.getLogger('elasticsearch').level = logging.ERROR IN_TY = "elasticsearch" @@ -44,17 +45,16 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationElasticsearchSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"host": data["host"], "apiKeyId": data["apiKeyId"], "apiKey": data["apiKey"], - "indexes": data["indexes"], "port": data["port"]}) + changes={"host": data.host, "apiKeyId": data.api_key_id, "apiKey": data.api_key, + "indexes": data.indexes, "port": data.port}) else: - return add(tenant_id=tenant_id, - project_id=project_id, - host=data["host"], api_key=data["apiKey"], api_key_id=data["apiKeyId"], indexes=data["indexes"], - port=data["port"]) + return add(tenant_id=tenant_id, 
project_id=project_id, + host=data.host, api_key=data.api_key, api_key_id=data.api_key_id, + indexes=data.indexes, port=data.port) def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15): @@ -64,15 +64,9 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15): args = { "hosts": [{"host": host, "port": port, "scheme": scheme}], "verify_certs": False, - # "ca_certs": False, - # "connection_class": RequestsHttpConnection, "request_timeout": timeout, "api_key": (api_key_id, api_key) } - # if api_key_id is not None and len(api_key_id) > 0: - # # args["http_auth"] = (username, password) - # token = "ApiKey " + base64.b64encode(f"{api_key_id}:{api_key}".encode("utf-8")).decode("utf-8") - # args["headers"] = {"Authorization": token} es = Elasticsearch( **args ) @@ -88,8 +82,8 @@ def __get_es_client(host, port, api_key_id, api_key, use_ssl=False, timeout=15): return es -def ping(tenant_id, host, port, apiKeyId, apiKey): - es = __get_es_client(host, port, apiKeyId, apiKey, timeout=3) +def ping(tenant_id, data: schemas.IntegrationElasticsearchTestSchema): + es = __get_es_client(data.host, data.port, data.api_key_id, data.api_key, timeout=3) if es is None: return {"state": False} return {"state": es.ping()} diff --git a/api/chalicelib/core/log_tool_newrelic.py b/api/chalicelib/core/log_tool_newrelic.py index 14b0d4e57..1eb23c6aa 100644 --- a/api/chalicelib/core/log_tool_newrelic.py +++ b/api/chalicelib/core/log_tool_newrelic.py @@ -1,4 +1,5 @@ from chalicelib.core import log_tools +from schemas import schemas IN_TY = "newrelic" @@ -34,17 +35,16 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationNewrelicSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"applicationId": data["applicationId"], - "xQueryKey": 
data["xQueryKey"], - "region": data["region"]}) + changes={"applicationId": data.application_id, + "xQueryKey": data.x_query_key, + "region": data.region}) else: return add(tenant_id=tenant_id, project_id=project_id, - application_id=data["applicationId"], - x_query_key=data["xQueryKey"], - region=data["region"] - ) + application_id=data.application_id, + x_query_key=data.x_query_key, + region=data.region) diff --git a/api/chalicelib/core/log_tool_rollbar.py b/api/chalicelib/core/log_tool_rollbar.py index 34f6c6114..34f934dc9 100644 --- a/api/chalicelib/core/log_tool_rollbar.py +++ b/api/chalicelib/core/log_tool_rollbar.py @@ -1,4 +1,5 @@ from chalicelib.core import log_tools +from schemas import schemas IN_TY = "rollbar" @@ -27,12 +28,12 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationRollbarSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"accessToken": data["accessToken"]}) + changes={"accessToken": data.access_token}) else: return add(tenant_id=tenant_id, project_id=project_id, - access_token=data["accessToken"]) + access_token=data.access_token) diff --git a/api/chalicelib/core/log_tool_sentry.py b/api/chalicelib/core/log_tool_sentry.py index 1f51f1556..ce6aff21b 100644 --- a/api/chalicelib/core/log_tool_sentry.py +++ b/api/chalicelib/core/log_tool_sentry.py @@ -1,5 +1,6 @@ import requests from chalicelib.core import log_tools +from schemas import schemas IN_TY = "sentry" @@ -35,18 +36,19 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationSentrySchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"projectSlug": 
data["projectSlug"], - "organizationSlug": data["organizationSlug"], - "token": data["token"]}) + changes={"projectSlug": data.project_slug, + "organizationSlug": data.organization_slug, + "token": data.token}) else: return add(tenant_id=tenant_id, project_id=project_id, - project_slug=data["projectSlug"], - organization_slug=data["organizationSlug"], token=data["token"]) + project_slug=data.project_slug, + organization_slug=data.organization_slug, + token=data.token) def proxy_get(tenant_id, project_id, event_id): diff --git a/api/chalicelib/core/log_tool_stackdriver.py b/api/chalicelib/core/log_tool_stackdriver.py index 611a7c637..21b1619ea 100644 --- a/api/chalicelib/core/log_tool_stackdriver.py +++ b/api/chalicelib/core/log_tool_stackdriver.py @@ -1,4 +1,5 @@ from chalicelib.core import log_tools +from schemas import schemas IN_TY = "stackdriver" @@ -29,14 +30,13 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegartionStackdriverSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"serviceAccountCredentials": data["serviceAccountCredentials"], - "logName": data["logName"]}) + changes={"serviceAccountCredentials": data.service_account_credentials, + "logName": data.log_name}) else: - return add(tenant_id=tenant_id, - project_id=project_id, - service_account_credentials=data["serviceAccountCredentials"], - log_name=data["logName"]) + return add(tenant_id=tenant_id, project_id=project_id, + service_account_credentials=data.service_account_credentials, + log_name=data.log_name) diff --git a/api/chalicelib/core/log_tool_sumologic.py b/api/chalicelib/core/log_tool_sumologic.py index 8e34ea70a..2e129302a 100644 --- a/api/chalicelib/core/log_tool_sumologic.py +++ b/api/chalicelib/core/log_tool_sumologic.py @@ -1,4 +1,5 @@ from chalicelib.core import 
log_tools +from schemas import schemas IN_TY = "sumologic" @@ -38,16 +39,16 @@ def delete(tenant_id, project_id): return log_tools.delete(project_id=project_id, integration=IN_TY) -def add_edit(tenant_id, project_id, data): +def add_edit(tenant_id, project_id, data: schemas.IntegrationSumologicSchema): s = get(project_id) if s is not None: return update(tenant_id=tenant_id, project_id=project_id, - changes={"accessId": data["accessId"], - "accessKey": data["accessKey"], - "region": data["region"]}) + changes={"accessId": data.access_id, + "accessKey": data.access_key, + "region": data.region}) else: return add(tenant_id=tenant_id, project_id=project_id, - access_id=data["accessId"], - access_key=data["accessKey"], - region=data["region"]) + access_id=data.access_id, + access_key=data.access_key, + region=data.region) diff --git a/api/chalicelib/core/metadata.py b/api/chalicelib/core/metadata.py index 909ecff23..e1f00a88e 100644 --- a/api/chalicelib/core/metadata.py +++ b/api/chalicelib/core/metadata.py @@ -243,45 +243,45 @@ def get_keys_by_projects(project_ids): return results -def add_edit_delete(tenant_id, project_id, new_metas): - old_metas = get(project_id) - old_indexes = [k["index"] for k in old_metas] - new_indexes = [k["index"] for k in new_metas if "index" in k] - new_keys = [k["key"] for k in new_metas] - - add_metas = [k["key"] for k in new_metas - if "index" not in k] - new_metas = {k["index"]: {"key": k["key"]} for - k in new_metas if - "index" in k} - old_metas = {k["index"]: {"key": k["key"]} for k in old_metas} - - if len(new_keys) > 20: - return {"errors": ["you cannot add more than 20 key"]} - for k in new_metas.keys(): - if re.match(regex, new_metas[k]["key"]) is None: - return {"errors": [f"invalid key {k}"]} - for k in add_metas: - if re.match(regex, k) is None: - return {"errors": [f"invalid key {k}"]} - if len(new_indexes) > len(set(new_indexes)): - return {"errors": ["duplicate indexes"]} - if len(new_keys) > len(set(new_keys)): - return 
{"errors": ["duplicate keys"]} - to_delete = list(set(old_indexes) - set(new_indexes)) - - with pg_client.PostgresClient() as cur: - for d in to_delete: - delete(tenant_id=tenant_id, project_id=project_id, index=d) - - for k in add_metas: - add(tenant_id=tenant_id, project_id=project_id, new_name=k) - - for k in new_metas.keys(): - if new_metas[k]["key"].lower() != old_metas[k]["key"]: - edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"]) - - return {"data": get(project_id)} +# def add_edit_delete(tenant_id, project_id, new_metas): +# old_metas = get(project_id) +# old_indexes = [k["index"] for k in old_metas] +# new_indexes = [k["index"] for k in new_metas if "index" in k] +# new_keys = [k["key"] for k in new_metas] +# +# add_metas = [k["key"] for k in new_metas +# if "index" not in k] +# new_metas = {k["index"]: {"key": k["key"]} for +# k in new_metas if +# "index" in k} +# old_metas = {k["index"]: {"key": k["key"]} for k in old_metas} +# +# if len(new_keys) > 20: +# return {"errors": ["you cannot add more than 20 key"]} +# for k in new_metas.keys(): +# if re.match(regex, new_metas[k]["key"]) is None: +# return {"errors": [f"invalid key {k}"]} +# for k in add_metas: +# if re.match(regex, k) is None: +# return {"errors": [f"invalid key {k}"]} +# if len(new_indexes) > len(set(new_indexes)): +# return {"errors": ["duplicate indexes"]} +# if len(new_keys) > len(set(new_keys)): +# return {"errors": ["duplicate keys"]} +# to_delete = list(set(old_indexes) - set(new_indexes)) +# +# with pg_client.PostgresClient() as cur: +# for d in to_delete: +# delete(tenant_id=tenant_id, project_id=project_id, index=d) +# +# for k in add_metas: +# add(tenant_id=tenant_id, project_id=project_id, new_name=k) +# +# for k in new_metas.keys(): +# if new_metas[k]["key"].lower() != old_metas[k]["key"]: +# edit(tenant_id=tenant_id, project_id=project_id, index=k, new_name=new_metas[k]["key"]) +# +# return {"data": get(project_id)} def 
get_remaining_metadata_with_count(tenant_id): diff --git a/api/chalicelib/core/notifications.py b/api/chalicelib/core/notifications.py index 1fc9a6508..d7829d37b 100644 --- a/api/chalicelib/core/notifications.py +++ b/api/chalicelib/core/notifications.py @@ -42,7 +42,7 @@ def get_all_count(tenant_id, user_id): def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): - if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: + if len(notification_ids) == 0 and endTimestamp is None: return False if startTimestamp is None: startTimestamp = 0 diff --git a/api/chalicelib/core/product_analytics.py b/api/chalicelib/core/product_analytics.py index 4b1e15a04..eb61f07fa 100644 --- a/api/chalicelib/core/product_analytics.py +++ b/api/chalicelib/core/product_analytics.py @@ -1,8 +1,13 @@ +from typing import List + import schemas +from chalicelib.core import metadata from chalicelib.core.metrics import __get_constraints, __get_constraint_values from chalicelib.utils import helper, dev from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils import sql_helper as sh +from time import time def __transform_journey(rows): @@ -19,90 +24,346 @@ def __transform_journey(rows): return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} -JOURNEY_DEPTH = 5 +def __transform_journey2(rows, reverse_path=False): + # nodes should contain duplicates for different steps otherwise the UI crashes + nodes = [] + nodes_values = [] + links = [] + for r in rows: + source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}" + if source not in nodes: + nodes.append(source) + # TODO: remove this after UI supports long values + nodes_values.append({"name": r['e_value'][:10], "eventType": r['event_type']}) + if r['next_value']: + target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}" + if target not in 
nodes: + nodes.append(target) + # TODO: remove this after UI supports long values + nodes_values.append({"name": r['next_value'][:10], "eventType": r['next_type']}) + link = {"eventType": r['event_type'], "value": r["sessions_count"], + "avgTimeToTarget": r["avg_time_to_target"]} + if not reverse_path: + link["source"] = nodes.index(source) + link["target"] = nodes.index(target) + else: + link["source"] = nodes.index(target) + link["target"] = nodes.index(source) + links.append(link) + + return {"nodes": nodes_values, + "links": sorted(links, key=lambda x: x["value"], reverse=True)} + + JOURNEY_TYPES = { - schemas.ProductAnalyticsEventType.location: {"table": "events.pages", "column": "path", "table_id": "message_id"}, - schemas.ProductAnalyticsEventType.click: {"table": "events.clicks", "column": "label", "table_id": "message_id"}, - schemas.ProductAnalyticsEventType.input: {"table": "events.inputs", "column": "label", "table_id": "message_id"}, - schemas.ProductAnalyticsEventType.custom_event: {"table": "events_common.customs", "column": "name", - "table_id": "seq_index"} + schemas.ProductAnalyticsSelectedEventType.location: {"table": "events.pages", "column": "path"}, + schemas.ProductAnalyticsSelectedEventType.click: {"table": "events.clicks", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.input: {"table": "events.inputs", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.custom_event: {"table": "events_common.customs", "column": "name"} } -def path_analysis(project_id, data: schemas.PathAnalysisSchema): +def path_analysis(project_id: int, data: schemas.PathAnalysisSchema, + selected_event_type: List[schemas.ProductAnalyticsSelectedEventType], + density: int = 4, hide_minor_paths: bool = False): # pg_sub_query_subset = __get_constraints(project_id=project_id, data=args, duration=True, main_table="sessions", # time_constraint=True) # TODO: check if data=args is required - pg_sub_query_subset = 
__get_constraints(project_id=project_id, duration=True, main_table="sessions", - time_constraint=True) - event_start = None - event_table = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["table"] - event_column = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["column"] - event_table_id = JOURNEY_TYPES[schemas.ProductAnalyticsEventType.location]["table_id"] + pg_sub_query_subset = __get_constraints(project_id=project_id, duration=True, main_table="s", time_constraint=True) + sub_events = [] + start_points_join = "" + start_points_conditions = [] + sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s", + "project_id=%(project_id)s", "events_count > 1", "duration>0"] + if len(selected_event_type) == 0: + selected_event_type.append(schemas.ProductAnalyticsSelectedEventType.location) + sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"], + "column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"], + "eventType": schemas.ProductAnalyticsSelectedEventType.location.value}) + else: + for v in selected_event_type: + if JOURNEY_TYPES.get(v): + sub_events.append({"table": JOURNEY_TYPES[v]["table"], + "column": JOURNEY_TYPES[v]["column"], + "eventType": v}) + extra_values = {} - for f in data.filters: - if f.type == schemas.ProductAnalyticsFilterType.start_point: - event_start = f.value[0] - elif f.type == schemas.ProductAnalyticsFilterType.event_type and JOURNEY_TYPES.get(f.value[0]): - event_table = JOURNEY_TYPES[f.value[0]]["table"] - event_column = JOURNEY_TYPES[f.value[0]]["column"] - elif f.type == schemas.ProductAnalyticsFilterType.user_id: - pg_sub_query_subset.append(f"sessions.user_id = %(user_id)s") - extra_values["user_id"] = f.value + reverse = False + meta_keys = None + exclusions = {} + for i, f in enumerate(data.filters): + op = sh.get_sql_operator(f.operator) + is_any = sh.isAny_opreator(f.operator) + is_not = 
sh.is_negation_operator(f.operator) + is_undefined = sh.isUndefined_operator(f.operator) + f_k = f"f_value_{i}" + extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)} + + if f.type in [schemas.ProductAnalyticsFilterType.start_point, schemas.ProductAnalyticsFilterType.end_point]: + for sf in f.filters: + extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)} + start_points_conditions.append(f"(event_type='{sf.type}' AND " + + sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not, + value_key=f_k) + + ")") + + reverse = f.type == schemas.ProductAnalyticsFilterType.end_point + elif f.type == schemas.ProductAnalyticsFilterType.exclude: + for sf in f.filters: + if sf.type in selected_event_type: + extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)} + exclusions[sf.type] = [ + sh.multi_conditions(f'{JOURNEY_TYPES[sf.type]["column"]} != %({f_k})s', sf.value, is_not=True, + value_key=f_k)] + + # ---- meta-filters + if f.type == schemas.FilterType.user_browser: + if is_any: + sessions_conditions.append('user_browser IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_os]: + if is_any: + sessions_conditions.append('user_os IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_device]: + if is_any: + sessions_conditions.append('user_device IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_country]: + if is_any: + sessions_conditions.append('user_country IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, 
value_key=f_k)) + + elif f.type == schemas.FilterType.user_city: + if is_any: + sessions_conditions.append('user_city IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.user_state: + if is_any: + sessions_conditions.append('user_state IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_source]: + if is_any: + sessions_conditions.append('utm_source IS NOT NULL') + elif is_undefined: + sessions_conditions.append('utm_source IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_medium]: + if is_any: + sessions_conditions.append('utm_medium IS NOT NULL') + elif is_undefined: + sessions_conditions.append('utm_medium IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_campaign]: + if is_any: + sessions_conditions.append('utm_campaign IS NOT NULL') + elif is_undefined: + sessions_conditions.append('utm_campaign IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.duration: + if len(f.value) > 0 and f.value[0] is not None: + sessions_conditions.append("duration >= %(minDuration)s") + extra_values["minDuration"] = f.value[0] + if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: + sessions_conditions.append("duration <= %(maxDuration)s") + extra_values["maxDuration"] = f.value[1] + elif f.type == schemas.FilterType.referrer: + # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS 
p USING(session_id)" + if is_any: + sessions_conditions.append('base_referrer IS NOT NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + elif f.type == schemas.FilterType.metadata: + # get metadata list only if you need it + if meta_keys is None: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if f.source in meta_keys.keys(): + if is_any: + sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NOT NULL") + elif is_undefined: + sessions_conditions.append(f"{metadata.index_to_colname(meta_keys[f.source])} IS NULL") + else: + sessions_conditions.append( + sh.multi_conditions( + f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", + f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: + if is_any: + sessions_conditions.append('user_id IS NOT NULL') + elif is_undefined: + sessions_conditions.append('user_id IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.user_anonymous_id, + schemas.FilterType.user_anonymous_id_ios]: + if is_any: + sessions_conditions.append('user_anonymous_id IS NOT NULL') + elif is_undefined: + sessions_conditions.append('user_anonymous_id IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: + if is_any: + sessions_conditions.append('rev_id IS NOT NULL') + elif is_undefined: + sessions_conditions.append('rev_id IS NULL') + else: + sessions_conditions.append( + sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + + elif f.type == 
schemas.FilterType.platform: + # op = __ sh.get_sql_operator(f.operator) + sessions_conditions.append( + sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.issue: + if is_any: + sessions_conditions.append("array_length(issue_types, 1) > 0") + else: + sessions_conditions.append( + sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.events_count: + sessions_conditions.append( + sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + events_subquery = [] + for t in sub_events: + sub_events_conditions = ["e.timestamp >= %(startTimestamp)s", + "e.timestamp < %(endTimestamp)s"] + exclusions.get(t["eventType"], []) + events_subquery.append(f"""\ + SELECT session_id, {t["column"]} AS e_value, timestamp, '{t["eventType"]}' AS event_type + FROM {t["table"]} AS e + INNER JOIN sub_sessions USING (session_id) + WHERE {" AND ".join(sub_events_conditions)}""") + events_subquery = "\n UNION ALL \n".join(events_subquery) + + if reverse: + path_direction = "DESC" + else: + path_direction = "" + + if len(start_points_conditions) == 0: + start_points_join = """INNER JOIN + (SELECT event_type, e_value + FROM ranked_events + WHERE event_number_in_session = 1 + GROUP BY event_type, e_value + ORDER BY count(1) DESC + LIMIT 2 + ) AS top_start_events USING (event_type, e_value)""" + else: + start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")"] + start_points_conditions.append("event_number_in_session = 1") + start_points_conditions.append("next_value IS NOT NULL") with pg_client.PostgresClient() as cur: - pg_query = f"""SELECT source_event, - target_event, - count(*) AS value - FROM (SELECT event_number || '_' || value as target_event, - LAG(event_number || '_' || value, 1) OVER ( PARTITION BY session_rank ) AS source_event - FROM (SELECT value, - session_rank, 
- message_id, - ROW_NUMBER() OVER ( PARTITION BY session_rank ORDER BY timestamp ) AS event_number - - {f"FROM (SELECT * FROM (SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY timestamp ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN timestamp ELSE NULL END as mark" - if event_start else ""} - - FROM (SELECT session_id, - message_id, - timestamp, - value, - SUM(new_session) OVER (ORDER BY session_id, timestamp) AS session_rank - FROM (SELECT *, - CASE - WHEN source_timestamp IS NULL THEN 1 - ELSE 0 END AS new_session - FROM (SELECT session_id, - {event_table_id} AS message_id, - timestamp, - {event_column} AS value, - LAG(timestamp) - OVER (PARTITION BY session_id ORDER BY timestamp) AS source_timestamp - FROM {event_table} INNER JOIN public.sessions USING (session_id) - WHERE {" AND ".join(pg_sub_query_subset)} - ) AS related_events) AS ranked_events) AS processed - {") AS marked) AS maxed WHERE timestamp >= max) AS filtered" if event_start else ""} - ) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE source_event IS NOT NULL - and target_event IS NOT NULL - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" + pg_query = f"""\ +WITH sub_sessions AS ( SELECT session_id + FROM public.sessions + WHERE {" AND ".join(sessions_conditions)}), + sub_events AS ({events_subquery}), + ranked_events AS (SELECT * + FROM (SELECT session_id, + event_type, + e_value, + row_number() OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS event_number_in_session, + LEAD(e_value, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_value, + LEAD(event_type, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) AS next_type, + abs(LEAD(timestamp, 1) OVER (PARTITION BY session_id ORDER BY timestamp {path_direction}) - + timestamp) AS time_to_next + FROM sub_events + ORDER BY session_id) AS full_ranked_events + WHERE event_number_in_session 
< %(density)s + ), + start_points AS (SELECT session_id + FROM ranked_events {start_points_join} + WHERE {" AND ".join(start_points_conditions)}), + limited_events AS (SELECT * + FROM (SELECT *, + row_number() + OVER (PARTITION BY event_number_in_session, event_type, e_value ORDER BY sessions_count DESC ) AS _event_number_in_group + FROM (SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + time_to_next, + count(1) AS sessions_count + FROM ranked_events + INNER JOIN start_points USING (session_id) + GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, + time_to_next) AS groupped_events) AS ranked_groupped_events + WHERE _event_number_in_group < %(eventThresholdNumberInGroup)s) +SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + sessions_count, + avg(time_to_next) AS avg_time_to_target +FROM limited_events +GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count +ORDER BY event_number_in_session, e_value, next_value;""" params = {"project_id": project_id, "startTimestamp": data.startTimestamp, - "endTimestamp": data.endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, + "endTimestamp": data.endTimestamp, "density": density, + "eventThresholdNumberInGroup": 8 if hide_minor_paths else 6, # TODO: add if data=args is required # **__get_constraint_values(args), **extra_values} query = cur.mogrify(pg_query, params) - print("----------------------") - print(query) - print("----------------------") + _now = time() + cur.execute(query) + if time() - _now > 3: + print(f">>>>>>>>>PathAnalysis long query ({int(time() - _now)}s)<<<<<<<<<") + print("----------------------") + print(query) + print("----------------------") rows = cur.fetchall() - return __transform_journey(rows) + return __transform_journey2(rows=rows, reverse_path=reverse) # # def __compute_weekly_percentage(rows): diff --git a/api/chalicelib/core/projects.py 
b/api/chalicelib/core/projects.py index ec2e2a1df..8cc07f162 100644 --- a/api/chalicelib/core/projects.py +++ b/api/chalicelib/core/projects.py @@ -41,12 +41,12 @@ def __update(tenant_id, project_id, changes): return helper.dict_to_camel_case(cur.fetchone()) -def __create(tenant_id, name): +def __create(tenant_id, data): with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""INSERT INTO public.projects (name, active) - VALUES (%(name)s,TRUE) + query = cur.mogrify(f"""INSERT INTO public.projects (name, platform, active) + VALUES (%(name)s,%(platform)s,TRUE) RETURNING project_id;""", - {"name": name}) + data) cur.execute(query=query) project_id = cur.fetchone()["project_id"] return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True) @@ -69,7 +69,8 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False): query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""} SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at, - created_at, sessions_last_check_at, sample_rate {extra_projection} + s.created_at, s.sessions_last_check_at, s.sample_rate, s.platform + {extra_projection} FROM public.projects AS s WHERE s.deleted_at IS NULL ORDER BY s.name {") AS raw" if recorded else ""};""", @@ -159,7 +160,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} - return {"data": __create(tenant_id=tenant_id, name=data.name)} + return {"data": __create(tenant_id=tenant_id, data=data.model_dump())} def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): @@ -169,7 +170,7 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} return {"data": 
__update(tenant_id=tenant_id, project_id=project_id, - changes={"name": data.name})} + changes=data.model_dump())} def delete(tenant_id, user_id, project_id): @@ -200,14 +201,14 @@ def get_gdpr(project_id): return row -def edit_gdpr(project_id, gdpr): +def edit_gdpr(project_id, gdpr: schemas.GdprSchema): with pg_client.PostgresClient() as cur: query = cur.mogrify("""UPDATE public.projects SET gdpr = gdpr|| %(gdpr)s WHERE project_id = %(project_id)s AND deleted_at ISNULL RETURNING gdpr;""", - {"project_id": project_id, "gdpr": json.dumps(gdpr)}) + {"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump_json())}) cur.execute(query=query) row = cur.fetchone() if not row: @@ -252,15 +253,9 @@ def get_capture_status(project_id): return helper.dict_to_camel_case(cur.fetchone()) -def update_capture_status(project_id, changes): - if "rate" not in changes and "captureAll" not in changes: - return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]} - if int(changes["rate"]) < 0 or int(changes["rate"]) > 100: - return {"errors": ["'rate' must be between 0..100."]} - sample_rate = 0 - if "rate" in changes: - sample_rate = int(changes["rate"]) - if changes.get("captureAll"): +def update_capture_status(project_id, changes: schemas.SampleRateSchema): + sample_rate = changes.rate + if changes.capture_all: sample_rate = 100 with pg_client.PostgresClient() as cur: query = cur.mogrify("""UPDATE public.projects diff --git a/api/chalicelib/core/saved_search.py b/api/chalicelib/core/saved_search.py index 1999c6758..fec59f13c 100644 --- a/api/chalicelib/core/saved_search.py +++ b/api/chalicelib/core/saved_search.py @@ -7,7 +7,7 @@ from chalicelib.utils.TimeUTC import TimeUTC def create(project_id, user_id, data: schemas.SavedSearchSchema): with pg_client.PostgresClient() as cur: - data = data.dict() + data = data.model_dump() data["filter"] = json.dumps(data["filter"]) query = cur.mogrify("""\ INSERT INTO public.searches (project_id, user_id, name, 
filter,is_public) @@ -25,7 +25,7 @@ def create(project_id, user_id, data: schemas.SavedSearchSchema): def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema): with pg_client.PostgresClient() as cur: - data = data.dict() + data = data.model_dump() data["filter"] = json.dumps(data["filter"]) query = cur.mogrify(f"""\ UPDATE public.searches @@ -43,7 +43,6 @@ def update(search_id, project_id, user_id, data: schemas.SavedSearchSchema): r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) r["filter"] = helper.old_search_payload_to_flat(r["filter"]) r = helper.dict_to_camel_case(r) - # r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) return r diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 9b5335326..61aef2edb 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -36,7 +36,7 @@ COALESCE((SELECT TRUE def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False): if data.bookmarked: - data.startDate, data.endDate = sessions_favorite.get_start_end_timestamp(project_id, user_id) + data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id) full_args, query_part = search_query_parts(data=data, error_status=error_status, errors_only=errors_only, favorite_only=data.bookmarked, issue=issue, project_id=project_id, @@ -69,7 +69,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.order is None: data.order = schemas.SortOrderType.desc.value else: - data.order = data.order.value + data.order = data.order if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" @@ -104,7 +104,7 @@ def 
search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.order is None: data.order = schemas.SortOrderType.desc.value else: - data.order = data.order.value + data.order = data.order sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -129,7 +129,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ print("--------- SESSIONS SEARCH QUERY EXCEPTION -----------") print(main_query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(data.json()) + print(data.model_dump_json()) print("--------------------") raise err if errors_only or ids_only: @@ -164,7 +164,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, metric_of: schemas.MetricOfTable, metric_value: List): - step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, + step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, density=density, factor=1, decimal=True)) extra_event = None if metric_of == schemas.MetricOfTable.visited_url: @@ -204,7 +204,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d print("--------- SESSIONS-SERIES QUERY EXCEPTION -----------") print(main_query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(data.json()) + print(data.model_dump_json()) print("--------------------") raise err if view_type == schemas.MetricTimeseriesViewType.line_chart: @@ -267,7 +267,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d return sessions -def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): +def __is_valid_event(is_any: 
bool, event: schemas.SessionSearchEventSchema2): return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, schemas.EventType.graphql] \ or event.type in [schemas.PerformanceEventType.location_dom_complete, @@ -284,7 +284,7 @@ def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None): ss_constraints = [] - full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, + full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp, "projectId": project_id, "userId": user_id} extra_constraints = [ "s.project_id = %(project_id)s", @@ -516,22 +516,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not, value_key=f_k)) - # search sessions with click_rage on a specific selector - if len(f.filters) > 0 and schemas.IssueType.click_rage in f.value: - for j, sf in enumerate(f.filters): - if sf.operator == schemas.IssueFilterOperator._on_selector: - f_k = f"f_value{i}_{j}" - full_args = {**full_args, **sh.multi_values(sf.value, value_key=f_k)} - extra_constraints += ["mc.timestamp>=%(startDate)s", - "mc.timestamp<=%(endDate)s", - "mis.type='click_rage'", - sh.multi_conditions(f"mc.selector=%({f_k})s", - sf.value, is_not=is_not, - value_key=f_k)] - - extra_from += """INNER JOIN events.clicks AS mc USING(session_id) - INNER JOIN events_common.issues USING (session_id,timestamp) - INNER JOIN public.issues AS mis USING (issue_id)\n""" elif filter_type == schemas.FilterType.events_count: extra_constraints.append( @@ -582,11 +566,11 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, event_where.append(f"event_{event_index - 
1}.timestamp <= main.timestamp") e_k = f"e_value{i}" s_k = e_k + "_source" - if event.type != schemas.PerformanceEventType.time_between_events: - event.value = helper.values_for_operator(value=event.value, op=event.operator) - full_args = {**full_args, - **sh.multi_values(event.value, value_key=e_k), - **sh.multi_values(event.source, value_key=s_k)} + + event.value = helper.values_for_operator(value=event.value, op=event.operator) + full_args = {**full_args, + **sh.multi_values(event.value, value_key=e_k), + **sh.multi_values(event.source, value_key=s_k)} if event_type == events.EventType.CLICK.ui_type: event_from = event_from % f"{events.EventType.CLICK.table} AS main " @@ -741,44 +725,6 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.time_between_events: - event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " - if not isinstance(event.value[0].value, list): - event.value[0].value = [event.value[0].value] - if not isinstance(event.value[1].value, list): - event.value[1].value = [event.value[1].value] - event.value[0].value = helper.values_for_operator(value=event.value[0].value, - op=event.value[0].operator) - event.value[1].value = helper.values_for_operator(value=event.value[1].value, - op=event.value[0].operator) - e_k1 = e_k + "_e1" - e_k2 = e_k + "_e2" - full_args = {**full_args, - **sh.multi_values(event.value[0].value, value_key=e_k1), - **sh.multi_values(event.value[1].value, value_key=e_k2)} - s_op = sh.get_sql_operator(event.value[0].operator) - event_where += ["main2.timestamp >= %(startDate)s", "main2.timestamp <= %(endDate)s"] - if event_index > 0 
and not or_events: - event_where.append("main2.session_id=event_0.session_id") - is_any = sh.isAny_opreator(event.value[0].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main.{getattr(events.EventType, event.value[0].type).column} {s_op} %({e_k1})s", - event.value[0].value, value_key=e_k1)) - s_op = sh.get_sql_operator(event.value[1].operator) - is_any = sh.isAny_opreator(event.value[1].operator) - if not is_any: - event_where.append( - sh.multi_conditions( - f"main2.{getattr(events.EventType, event.value[1].type).column} {s_op} %({e_k2})s", - event.value[1].value, value_key=e_k2)) - - e_k += "_custom" - full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} - event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", - event.source, value_key=e_k)) elif event_type == schemas.EventType.request_details: event_from = event_from % f"{events.EventType.REQUEST.table} AS main " @@ -905,9 +851,9 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, else: data.events = [] # --------------------------------------------------------------------------- - if data.startDate is not None: + if data.startTimestamp is not None: extra_constraints.append("s.start_ts >= %(startDate)s") - if data.endDate is not None: + if data.endTimestamp is not None: extra_constraints.append("s.start_ts <= %(endDate)s") # if data.platform is not None: # if data.platform == schemas.PlatformType.mobile: diff --git a/api/chalicelib/core/sessions_mobs.py b/api/chalicelib/core/sessions_mobs.py index 9703db8eb..9e69e59fd 100644 --- a/api/chalicelib/core/sessions_mobs.py +++ b/api/chalicelib/core/sessions_mobs.py @@ -14,6 +14,16 @@ def __get_mob_keys(project_id, session_id): ] +def __get_ios_video_keys(project_id, session_id): + params = { + "sessionId": session_id, + "projectId": project_id + } + return [ + config("SESSION_IOS_VIDEO_PATTERN", default="replay.mp4") % params, 
+ ] + + def __get_mob_keys_deprecated(session_id): return [str(session_id), str(session_id) + "e"] @@ -44,12 +54,30 @@ def get_urls_depercated(session_id, check_existence: bool = True): return results -def get_ios(session_id): - return StorageClient.get_presigned_url_for_sharing( - bucket=config("ios_bucket"), - expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), - key=str(session_id) - ) +def get_ios(session_id, project_id, check_existence=False): + results = [] + for k in __get_mob_keys(project_id=project_id, session_id=session_id): + if check_existence and not StorageClient.exists(bucket=config("IOS_BUCKET"), key=k): + continue + results.append(StorageClient.get_presigned_url_for_sharing( + bucket=config("IOS_BUCKET"), + expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), + key=k + )) + return results + + +def get_ios_videos(session_id, project_id, check_existence=False): + results = [] + for k in __get_ios_video_keys(project_id=project_id, session_id=session_id): + if check_existence and not StorageClient.exists(bucket=config("IOS_VIDEO_BUCKET"), key=k): + continue + results.append(StorageClient.get_presigned_url_for_sharing( + bucket=config("IOS_VIDEO_BUCKET"), + expires_in=config("PRESIGNED_URL_EXPIRATION", cast=int, default=900), + key=k + )) + return results def delete_mobs(project_id, session_ids): diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index 0ad78fee7..c51c8eeb5 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ -69,7 +69,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se query = cur.mogrify(f"""SELECT sessions_notes.*, users.name AS user_name FROM sessions_notes INNER JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY created_at {data.order.value} + ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": 
project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) @@ -86,7 +86,7 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public) VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""", - {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict()}) + {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.model_dump()}) cur.execute(query) result = helper.dict_to_camel_case(cur.fetchone()) if result: @@ -115,7 +115,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot AND note_id = %(note_id)s AND deleted_at ISNULL RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s) AS user_name;""", - {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict()}) + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump()}) ) row = helper.dict_to_camel_case(cur.fetchone()) if row: diff --git a/api/chalicelib/core/sessions_replay.py b/api/chalicelib/core/sessions_replay.py index f27903a54..0c0922602 100644 --- a/api/chalicelib/core/sessions_replay.py +++ b/api/chalicelib/core/sessions_replay.py @@ -41,8 +41,6 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_ AND s.session_id = %(session_id)s;""", {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) - # print("===============") - # print(query) cur.execute(query=query) data = cur.fetchone() @@ -55,9 +53,9 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_ if e["type"].endswith("_IOS"): e["type"] = e["type"][:-len("_IOS")] data['crashes'] = 
events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - session_id=session_id) - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, + session_id=session_id) + data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id, project_id=project_id) else: data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, group_clickrage=True) @@ -117,8 +115,6 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat AND s.session_id = %(session_id)s;""", {"project_id": project_id, "session_id": session_id, "userId": context.user_id} ) - # print("===============") - # print(query) cur.execute(query=query) data = cur.fetchone() @@ -126,7 +122,10 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat data = helper.dict_to_camel_case(data) if full_data: if data["platform"] == 'ios': - data['mobsUrl'] = sessions_mobs.get_ios(session_id=session_id) + data['domURL'] = sessions_mobs.get_ios(session_id=session_id, project_id=project_id, + check_existence=False) + data['videoURL'] = sessions_mobs.get_ios_videos(session_id=session_id, project_id=project_id, + check_existence=False) else: data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id, check_existence=False) @@ -154,8 +153,6 @@ def get_events(project_id, session_id): AND s.session_id = %(session_id)s;""", {"project_id": project_id, "session_id": session_id} ) - # print("===============") - # print(query) cur.execute(query=query) s_data = cur.fetchone() @@ -168,8 +165,8 @@ def get_events(project_id, session_id): if e["type"].endswith("_IOS"): e["type"] = e["type"][:-len("_IOS")] data['crashes'] = events_ios.get_crashes_by_session_id(session_id=session_id) - data['userEvents'] = events_ios.get_customs_by_sessionId(project_id=project_id, - 
session_id=session_id) + data['userEvents'] = events_ios.get_customs_by_session_id(project_id=project_id, + session_id=session_id) else: data['events'] = events.get_by_session_id(project_id=project_id, session_id=session_id, group_clickrage=True) @@ -199,11 +196,11 @@ def reduce_issues(issues_list): i = 0 # remove same-type issues if the time between them is <2s while i < len(issues_list) - 1: - for j in range(i+1,len(issues_list)): + for j in range(i + 1, len(issues_list)): if issues_list[i]["type"] == issues_list[j]["type"]: break else: - i+=1 + i += 1 break if issues_list[i]["timestamp"] - issues_list[j]["timestamp"] < 2000: diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index fd824509d..e020fcfd5 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -24,17 +24,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} -def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: +def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]: """ Add minimal timestamp :param filter_d: dict contains events&filters&... 
:return: """ - stages: [dict] = filter_d.get("events", []) - filters: [dict] = filter_d.get("filters", []) - filter_issues = filter_d.get("issueTypes") - if filter_issues is None or len(filter_issues) == 0: - filter_issues = [] + stages: [dict] = filter_d.events + filters: [dict] = filter_d.filters + filter_issues = [] + # TODO: enable this if needed by an endpoint + # filter_issues = filter_d.get("issueTypes") + # if filter_issues is None or len(filter_issues) == 0: + # filter_issues = [] stage_constraints = ["main.timestamp <= %(endTimestamp)s"] first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s", "s.start_ts <= %(endTimestamp)s"] @@ -120,22 +122,22 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: i = -1 for s in stages: - if s.get("operator") is None: - s["operator"] = "is" + if s.operator is None: + s.operator = schemas.SearchEventOperator._is - if not isinstance(s["value"], list): - s["value"] = [s["value"]] - is_any = sh.isAny_opreator(s["operator"]) - if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0: + if not isinstance(s.value, list): + s.value = [s.value] + is_any = sh.isAny_opreator(s.operator) + if not is_any and isinstance(s.value, list) and len(s.value) == 0: continue i += 1 if i == 0: extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] else: extra_from = [] - op = sh.get_sql_operator(s["operator"]) + op = sh.get_sql_operator(s.operator) # event_type = s["type"].upper() - event_type = s["type"] + event_type = s.type if event_type == events.EventType.CLICK.ui_type: next_table = events.EventType.CLICK.table next_col_name = events.EventType.CLICK.column @@ -165,16 +167,16 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print(f"=================UNDEFINED:{event_type}") continue - values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), + values = {**values, 
**sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator), value_key=f"value{i + 1}")} - if sh.is_negation_operator(s["operator"]) and i > 0: + if sh.is_negation_operator(s.operator) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id FROM {next_table} AS s_main WHERE {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}")} + values=s.value, value_key=f"value{i + 1}")} AND s_main.timestamp >= T{i}.stage{i}_timestamp AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""") else: @@ -182,7 +184,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s.value, value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -225,7 +227,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: """ # LIMIT 10000 - params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"], + params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp, + "endTimestamp": filter_d.endTimestamp, "issueTypes": tuple(filter_issues), **values} with pg_client.PostgresClient() as cur: query = cur.mogrify(n_stages_query, params) @@ -239,7 +242,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------") print(query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(filter_d) + print(filter_d.model_dump_json()) print("--------------------") raise err return rows @@ -544,9 +547,9 @@ def get_issues(stages, rows, first_stage=None, 
last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -def get_top_insights(filter_d, project_id): +def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): output = [] - stages = filter_d.get("events", []) + stages = filter_d.events # TODO: handle 1 stage alone if len(stages) == 0: print("no stages found") @@ -554,17 +557,24 @@ def get_top_insights(filter_d, project_id): elif len(stages) == 1: # TODO: count sessions, and users for single stage output = [{ - "type": stages[0]["type"], - "value": stages[0]["value"], + "type": stages[0].type, + "value": stages[0].value, "dropPercentage": None, - "operator": stages[0]["operator"], + "operator": stages[0].operator, "sessionsCount": 0, "dropPct": 0, "usersCount": 0, "dropDueToIssues": 0 }] - counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), + # original + # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), + # project_id=project_id, user_id=None, count_only=True) + # first change + # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d), + # project_id=project_id, user_id=None, count_only=True) + # last change + counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), project_id=project_id, user_id=None, count_only=True) output[0]["sessionsCount"] = counts["countSessions"] output[0]["usersCount"] = counts["countUsers"] @@ -583,9 +593,9 @@ def get_top_insights(filter_d, project_id): return stages_list, total_drop_due_to_issues -def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None): +def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None): output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) - stages = filter_d.get("events", []) + stages = 
filter_d.events # The result of the multi-stage query rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) # print(json.dumps(rows[0],indent=4)) diff --git a/api/chalicelib/core/signup.py b/api/chalicelib/core/signup.py index ec4d99b3b..0a1caa0be 100644 --- a/api/chalicelib/core/signup.py +++ b/api/chalicelib/core/signup.py @@ -16,7 +16,7 @@ def create_tenant(data: schemas.UserSignupSchema): email = data.email print(f"=====================> {email}") - password = data.password + password = data.password.get_secret_value() if email is None or len(email) < 5: errors.append("Invalid email address.") diff --git a/api/chalicelib/core/users.py b/api/chalicelib/core/users.py index 5a9f8b3ee..08ab84d9a 100644 --- a/api/chalicelib/core/users.py +++ b/api/chalicelib/core/users.py @@ -223,29 +223,29 @@ def update(tenant_id, user_id, changes, output=True): return get(user_id=user_id, tenant_id=tenant_id) -def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks): +def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks): admin = get(tenant_id=tenant_id, user_id=user_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} - if data.get("userId") is not None: + if data.user_id is not None: return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]} - user = get_by_email_only(email=data["email"]) + user = get_by_email_only(email=data.email) if user: return {"errors": ["user already exists"]} - name = data.get("name", None) - if name is None or len(name) == 0: - name = data["email"] + + if data.name is None or len(data.name) == 0: + data.name = data.email invitation_token = __generate_invitation_token() - user = get_deleted_user_by_email(email=data["email"]) + user = get_deleted_user_by_email(email=data.email) if user is not None: - new_member = restore_member(email=data["email"], invitation_token=invitation_token, - 
admin=data.get("admin", False), name=name, user_id=user["userId"]) + new_member = restore_member(email=data.email, invitation_token=invitation_token, + admin=data.admin, name=data.name, user_id=user["userId"]) else: - new_member = create_new_member(email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name) + new_member = create_new_member(email=data.email, invitation_token=invitation_token, + admin=data.admin, name=data.name) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) background_tasks.add_task(email_helper.send_team_invitation, **{ - "recipient": data["email"], + "recipient": data.email, "invitation_link": new_member["invitationLink"], "client_id": tenants.get_by_tenant_id(tenant_id)["name"], "sender_name": admin["name"] @@ -607,11 +607,7 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): r = cur.fetchone() return r is not None \ and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + and abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 def change_jwt_iat(user_id): diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py index fb0906b9c..31c0f4bcd 100644 --- a/api/chalicelib/core/webhook.py +++ b/api/chalicelib/core/webhook.py @@ -77,6 +77,8 @@ def update(tenant_id, webhook_id, changes, replace_none=False): {"id": webhook_id, **changes}) ) w = helper.dict_to_camel_case(cur.fetchone()) + if w is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.") w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) if replace_none: for k in w.keys(): @@ -120,20 +122,22 @@ def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = sch return row["exists"] -def 
add_edit(tenant_id, data, replace_none=None): - if "name" in data and len(data["name"]) > 0 \ - and exists_by_name(name=data["name"], exclude_id=data.get("webhookId")): +def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None): + if len(data.name) > 0 \ + and exists_by_name(name=data.name, exclude_id=data.webhook_id): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") - if data.get("webhookId") is not None: - return update(tenant_id=tenant_id, webhook_id=data["webhookId"], - changes={"endpoint": data["endpoint"], - "authHeader": None if "authHeader" not in data else data["authHeader"], - "name": data["name"] if "name" in data else ""}, replace_none=replace_none) + if data.webhook_id is not None: + return update(tenant_id=tenant_id, webhook_id=data.webhook_id, + changes={"endpoint": data.endpoint, + "authHeader": data.auth_header, + "name": data.name}, + replace_none=replace_none) else: return add(tenant_id=tenant_id, - endpoint=data["endpoint"], - auth_header=None if "authHeader" not in data else data["authHeader"], - name=data["name"] if "name" in data else "", replace_none=replace_none) + endpoint=data.endpoint, + auth_header=data.auth_header, + name=data.name, + replace_none=replace_none) def delete(tenant_id, webhook_id): diff --git a/api/env.default b/api/env.default index 7d5266470..5626cd5ee 100644 --- a/api/env.default +++ b/api/env.default @@ -49,9 +49,12 @@ EFS_DEVTOOLS_MOB_PATTERN=%(sessionId)sdevtools SESSION_MOB_PATTERN_S=%(sessionId)s/dom.mobs SESSION_MOB_PATTERN_E=%(sessionId)s/dom.mobe DEVTOOLS_MOB_PATTERN=%(sessionId)s/devtools.mob +SESSION_IOS_VIDEO_PATTERN=%(sessionId)s/replay.mp4 PRESIGNED_URL_EXPIRATION=3600 ASSIST_JWT_EXPIRATION=144000 ASSIST_JWT_SECRET= PYTHONUNBUFFERED=1 REDIS_STRING=redis://redis-master.db.svc.cluster.local:6379 -SCH_DELETE_DAYS=30 \ No newline at end of file +SCH_DELETE_DAYS=30 +IOS_BUCKET=mobs +IOS_VIDEO_BUCKET=mobs \ No newline at end of file diff --git 
a/api/requirements-alerts.txt b/api/requirements-alerts.txt index a9de976ad..599600919 100644 --- a/api/requirements-alerts.txt +++ b/api/requirements-alerts.txt @@ -1,15 +1,16 @@ -requests==2.31.0 +# Keep this version to not have conflicts between requests and boto3 urllib3==1.26.16 -boto3==1.26.148 -pyjwt==2.7.0 -psycopg2-binary==2.9.6 -elasticsearch==8.8.0 -jira==3.5.1 +requests==2.31.0 +boto3==1.28.30 +pyjwt==2.8.0 +psycopg2-binary==2.9.7 +elasticsearch==8.9.0 +jira==3.5.2 -fastapi==0.96.0 -uvicorn[standard]==0.22.0 +fastapi==0.101.1 +uvicorn[standard]==0.23.2 python-decouple==3.8 -pydantic[email]==1.10.8 -apscheduler==3.10.1 +pydantic[email]==2.2.1 +apscheduler==3.10.4 diff --git a/api/requirements.txt b/api/requirements.txt index 8a0deca2a..8409af6b2 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,17 +1,18 @@ -requests==2.31.0 +# Keep this version to not have conflicts between requests and boto3 urllib3==1.26.16 -boto3==1.26.148 -pyjwt==2.7.0 -psycopg2-binary==2.9.6 -elasticsearch==8.8.0 -jira==3.5.1 +requests==2.31.0 +boto3==1.28.40 +pyjwt==2.8.0 +psycopg2-binary==2.9.7 +elasticsearch==8.9.0 +jira==3.5.2 -fastapi==0.97.0 -uvicorn[standard]==0.22.0 +fastapi==0.103.1 +uvicorn[standard]==0.23.2 python-decouple==3.8 -pydantic[email]==1.10.8 -apscheduler==3.10.1 +pydantic[email]==2.3.0 +apscheduler==3.10.4 -redis==4.5.5 +redis==5.0.0 diff --git a/api/routers/core.py b/api/routers/core.py index 1a0c6afdc..fff5f3de5 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -1,5 +1,6 @@ from typing import Union +from decouple import config from fastapi import Depends, Body import schemas @@ -18,20 +19,6 @@ from routers.base import get_routers public_app, app, app_apikey = get_routers() -@app.post('/{projectId}/sessions/search', tags=["sessions"]) -def sessions_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = sessions.search_sessions(data=data, 
project_id=projectId, user_id=context.user_id) - return {'data': data} - - -@app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) -def session_ids_search(projectId: int, data: schemas.FlatSessionsSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True) - return {'data': data} - - @app.get('/{projectId}/events/search', tags=["events"]) def events_search(projectId: int, q: str, type: Union[schemas.FilterType, schemas.EventType, @@ -107,9 +94,9 @@ def get_sentry(projectId: int, context: schemas.CurrentContext = Depends(OR_cont @app.post('/{projectId}/integrations/sentry', tags=["integrations"]) -def add_edit_sentry(projectId: int, data: schemas.SentrySchema = Body(...), +def add_edit_sentry(projectId: int, data: schemas.IntegrationSentrySchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_sentry.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sentry', tags=["integrations"]) @@ -133,9 +120,9 @@ def get_datadog(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/datadog', tags=["integrations"]) -def add_edit_datadog(projectId: int, data: schemas.DatadogSchema = Body(...), +def add_edit_datadog(projectId: int, data: schemas.IntegrationDatadogSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_datadog.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/datadog', tags=["integrations"]) @@ -154,9 +141,9 @@ def 
get_stackdriver(projectId: int, context: schemas.CurrentContext = Depends(OR @app.post('/{projectId}/integrations/stackdriver', tags=["integrations"]) -def add_edit_stackdriver(projectId: int, data: schemas.StackdriverSchema = Body(...), +def add_edit_stackdriver(projectId: int, data: schemas.IntegartionStackdriverSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_stackdriver.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/stackdriver', tags=["integrations"]) @@ -175,9 +162,9 @@ def get_newrelic(projectId: int, context: schemas.CurrentContext = Depends(OR_co @app.post('/{projectId}/integrations/newrelic', tags=["integrations"]) -def add_edit_newrelic(projectId: int, data: schemas.NewrelicSchema = Body(...), +def add_edit_newrelic(projectId: int, data: schemas.IntegrationNewrelicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_newrelic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/newrelic', tags=["integrations"]) @@ -196,9 +183,9 @@ def get_rollbar(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/rollbar', tags=["integrations"]) -def add_edit_rollbar(projectId: int, data: schemas.RollbarSchema = Body(...), +def add_edit_rollbar(projectId: int, data: schemas.IntegrationRollbarSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_rollbar.add_edit(tenant_id=context.tenant_id, project_id=projectId, 
data=data)} @app.delete('/{projectId}/integrations/rollbar', tags=["integrations"]) @@ -207,9 +194,9 @@ def delete_datadog(projectId: int, _=Body(None), context: schemas.CurrentContext @app.post('/integrations/bugsnag/list_projects', tags=["integrations"]) -def list_projects_bugsnag(data: schemas.BugsnagBasicSchema = Body(...), +def list_projects_bugsnag(data: schemas.IntegrationBugsnagBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorizationToken)} + return {"data": log_tool_bugsnag.list_projects(auth_token=data.authorization_token)} @app.get('/integrations/bugsnag', tags=["integrations"]) @@ -223,9 +210,9 @@ def get_bugsnag(projectId: int, context: schemas.CurrentContext = Depends(OR_con @app.post('/{projectId}/integrations/bugsnag', tags=["integrations"]) -def add_edit_bugsnag(projectId: int, data: schemas.BugsnagSchema = Body(...), +def add_edit_bugsnag(projectId: int, data: schemas.IntegrationBugsnagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_bugsnag.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/bugsnag', tags=["integrations"]) @@ -234,7 +221,7 @@ def delete_bugsnag(projectId: int, _=Body(None), context: schemas.CurrentContext @app.post('/integrations/cloudwatch/list_groups', tags=["integrations"]) -def list_groups_cloudwatch(data: schemas.CloudwatchBasicSchema = Body(...), +def list_groups_cloudwatch(data: schemas.IntegrationCloudwatchBasicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": log_tool_cloudwatch.list_log_groups(aws_access_key_id=data.awsAccessKeyId, aws_secret_access_key=data.awsSecretAccessKey, @@ -252,9 +239,9 @@ def get_cloudwatch(projectId: int, context: 
schemas.CurrentContext = Depends(OR_ @app.post('/{projectId}/integrations/cloudwatch', tags=["integrations"]) -def add_edit_cloudwatch(projectId: int, data: schemas.CloudwatchSchema = Body(...), +def add_edit_cloudwatch(projectId: int, data: schemas.IntegrationCloudwatchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_cloudwatch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/cloudwatch', tags=["integrations"]) @@ -273,16 +260,16 @@ def get_elasticsearch(projectId: int, context: schemas.CurrentContext = Depends( @app.post('/integrations/elasticsearch/test', tags=["integrations"]) -def test_elasticsearch_connection(data: schemas.ElasticsearchBasicSchema = Body(...), +def test_elasticsearch_connection(data: schemas.IntegrationElasticsearchTestSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, **data.dict())} + return {"data": log_tool_elasticsearch.ping(tenant_id=context.tenant_id, data=data)} @app.post('/{projectId}/integrations/elasticsearch', tags=["integrations"]) -def add_edit_elasticsearch(projectId: int, data: schemas.ElasticsearchSchema = Body(...), +def add_edit_elasticsearch(projectId: int, data: schemas.IntegrationElasticsearchSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { - "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + "data": log_tool_elasticsearch.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/elasticsearch', tags=["integrations"]) @@ -301,9 +288,9 @@ def get_sumologic(projectId: int, context: schemas.CurrentContext = Depends(OR_c 
@app.post('/{projectId}/integrations/sumologic', tags=["integrations"]) -def add_edit_sumologic(projectId: int, data: schemas.SumologicSchema = Body(...), +def add_edit_sumologic(projectId: int, data: schemas.IntegrationSumologicSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data.dict())} + return {"data": log_tool_sumologic.add_edit(tenant_id=context.tenant_id, project_id=projectId, data=data)} @app.delete('/{projectId}/integrations/sumologic', tags=["integrations"]) @@ -341,7 +328,7 @@ def get_integration_status_github(context: schemas.CurrentContext = Depends(OR_c @app.post('/integrations/jira', tags=["integrations"]) -def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), +def add_edit_jira_cloud(data: schemas.IssueTrackingJiraSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if not data.url.endswith('atlassian.net'): return {"errors": ["url must be a valid JIRA URL (example.atlassian.net)"]} @@ -350,18 +337,18 @@ def add_edit_jira_cloud(data: schemas.JiraSchema = Body(...), user_id=context.user_id) if error is not None and integration is None: return error - return {"data": integration.add_edit(data=data.dict())} + return {"data": integration.add_edit(data=data)} @app.post('/integrations/github', tags=["integrations"]) -def add_edit_github(data: schemas.GithubSchema = Body(...), +def add_edit_github(data: schemas.IssueTrackingGithubSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): error, integration = integrations_manager.get_integration(tool=integration_github.PROVIDER, tenant_id=context.tenant_id, user_id=context.user_id) if error is not None: return error - return {"data": integration.add_edit(data=data.dict())} + return {"data": integration.add_edit(data=data)} @app.delete('/integrations/issues', tags=["integrations"]) @@ -452,7 +439,7 @@ def get_gdpr(projectId: int, 
context: schemas.CurrentContext = Depends(OR_contex @app.post('/{projectId}/gdpr', tags=["projects", "gdpr"]) def edit_gdpr(projectId: int, data: schemas.GdprSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - result = projects.edit_gdpr(project_id=projectId, gdpr=data.dict()) + result = projects.edit_gdpr(project_id=projectId, gdpr=data) if "errors" in result: return result return {"data": result} @@ -470,20 +457,20 @@ def get_metadata(projectId: int, context: schemas.CurrentContext = Depends(OR_co return {"data": metadata.get(project_id=projectId)} -@app.post('/{projectId}/metadata/list', tags=["metadata"]) -def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list) +# @app.post('/{projectId}/metadata/list', tags=["metadata"]) +# def add_edit_delete_metadata(projectId: int, data: schemas.MetadataListSchema = Body(...), +# context: schemas.CurrentContext = Depends(OR_context)): +# return metadata.add_edit_delete(tenant_id=context.tenant_id, project_id=projectId, new_metas=data.list) @app.post('/{projectId}/metadata', tags=["metadata"]) -def add_metadata(projectId: int, data: schemas.MetadataBasicSchema = Body(...), +def add_metadata(projectId: int, data: schemas.MetadataSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.add(tenant_id=context.tenant_id, project_id=projectId, new_name=data.key) @app.post('/{projectId}/metadata/{index}', tags=["metadata"]) -def edit_metadata(projectId: int, index: int, data: schemas.MetadataBasicSchema = Body(...), +def edit_metadata(projectId: int, index: int, data: schemas.MetadataSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return metadata.edit(tenant_id=context.tenant_id, project_id=projectId, index=index, new_name=data.key) @@ -519,7 +506,7 
@@ def get_capture_status(projectId: int, context: schemas.CurrentContext = Depends @app.post('/{projectId}/sample_rate', tags=["projects"]) def update_capture_status(projectId: int, data: schemas.SampleRateSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": projects.update_capture_status(project_id=projectId, changes=data.dict())} + return {"data": projects.update_capture_status(project_id=projectId, changes=data)} @app.get('/announcements', tags=["announcements"]) @@ -688,6 +675,8 @@ def batch_view_notifications(data: schemas.NotificationsViewSchema, @app.get('/boarding', tags=['boarding']) def get_boarding_state(context: schemas.CurrentContext = Depends(OR_context)): + if config("LOCAL_DEV", cast=bool, default=False): + return {"data": ""} return {"data": boarding.get_state(tenant_id=context.tenant_id)} @@ -727,9 +716,9 @@ def delete_slack_integration(webhookId: int, _=Body(None), context: schemas.Curr @app.put('/webhooks', tags=["webhooks"]) -def add_edit_webhook(data: schemas.CreateEditWebhookSchema = Body(...), +def add_edit_webhook(data: schemas.WebhookSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data.dict(), replace_none=True)} + return {"data": webhook.add_edit(tenant_id=context.tenant_id, data=data, replace_none=True)} @app.get('/webhooks', tags=["webhooks"]) @@ -739,7 +728,7 @@ def get_webhooks(context: schemas.CurrentContext = Depends(OR_context)): @app.delete('/webhooks/{webhookId}', tags=["webhooks"]) def delete_webhook(webhookId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId)} + return webhook.delete(tenant_id=context.tenant_id, webhook_id=webhookId) @app.get('/client/members', tags=["client"]) @@ -765,8 +754,8 @@ def generate_new_user_token(context: schemas.CurrentContext = Depends(OR_context 
@app.post('/account/password', tags=["account"]) def change_client_password(data: schemas.EditUserPasswordSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return users.change_password(email=context.email, old_password=data.old_password, - new_password=data.new_password, tenant_id=context.tenant_id, + return users.change_password(email=context.email, old_password=data.old_password.get_secret_value(), + new_password=data.new_password.get_secret_value(), tenant_id=context.tenant_id, user_id=context.user_id) diff --git a/api/routers/core_dynamic.py b/api/routers/core_dynamic.py index 12f3806d5..d34244d68 100644 --- a/api/routers/core_dynamic.py +++ b/api/routers/core_dynamic.py @@ -45,7 +45,7 @@ def login_user(data: schemas.UserLoginSchema = Body(...)): detail="Invalid captcha." ) - r = users.authenticate(data.email, data.password) + r = users.authenticate(data.email, data.password.get_secret_value()) if r is None: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -129,7 +129,7 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc @app.post('/client/members', tags=["client"]) def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), + return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data, background_tasks=background_tasks) @@ -160,7 +160,7 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = if user["expiredChange"]: return {"errors": ["expired change, please re-use the invitation link"]} - return users.set_password_invitation(new_password=data.password, user_id=user["userId"]) + return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"]) @app.put('/client/members/{memberId}', 
tags=["client"]) @@ -193,8 +193,10 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"]) def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is None: @@ -207,11 +209,27 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba } +@app.post('/{projectId}/sessions/search', tags=["sessions"]) +def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id) + return {'data': data} + + +@app.post('/{projectId}/sessions/search/ids', tags=["sessions"]) +def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True) + return {'data': data} + + @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"]) def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_replay(project_id=projectId, session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is 
None: @@ -227,8 +245,10 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"]) def get_session_events(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) if data is None: return {"errors": ["session not found"]} @@ -249,18 +269,6 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, } -@app.post('/{projectId}/errors/search', tags=['errors']) -def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": errors.search(data, projectId, user_id=context.user_id)} - - -@app.get('/{projectId}/errors/stats', tags=['errors']) -def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): - return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) - - @app.get('/{projectId}/errors/{errorId}', tags=['errors']) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): @@ -272,15 +280,6 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun return data -@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors']) -def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, - 
**{"startDate": startDate, "endDate": endDate, "density": density}) - return data - - @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors']) def errors_get_details_sourcemaps(projectId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): @@ -329,9 +328,10 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} - if isinstance(sessionId, str): - print(f"{sessionId} not a valid number.") + if not sessionId.isnumeric(): return not_found + else: + sessionId = int(sessionId) if not sessions.session_exists(project_id=projectId, session_id=sessionId): print(f"{projectId}/{sessionId} not found in DB.") if not assist.session_exists(project_id=projectId, session_id=sessionId): @@ -349,9 +349,10 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} - if isinstance(sessionId, str): - print(f"{sessionId} not a valid number.") + if not sessionId.isnumeric(): return not_found + else: + sessionId = int(sessionId) if not sessions.session_exists(project_id=projectId, session_id=sessionId): print(f"{projectId}/{sessionId} not found in DB.") if not assist.session_exists(project_id=projectId, session_id=sessionId): @@ -484,7 +485,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), @app.post('/{projectId}/click_maps/search', tags=["click maps"]) -def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), +def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} @@ -515,7 +516,7 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"]) -async def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): +def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)} diff --git a/api/routers/subs/health.py b/api/routers/subs/health.py index 0414467ea..58d5625b7 100644 --- a/api/routers/subs/health.py +++ b/api/routers/subs/health.py @@ -1,3 +1,4 @@ +from decouple import config from fastapi import HTTPException, status from chalicelib.core import health, tenants @@ -8,6 +9,8 @@ public_app, app, app_apikey = get_routers() @app.get('/healthz', tags=["health-check"]) def get_global_health_status(): + if config("LOCAL_DEV", cast=bool, default=False): + return {"data": ""} return {"data": health.get_health()} diff --git a/api/routers/subs/insights.py b/api/routers/subs/insights.py index 3fb71c09c..fe5b33498 100644 --- a/api/routers/subs/insights.py +++ b/api/routers/subs/insights.py @@ -11,10 +11,10 @@ public_app, app, app_apikey = get_routers() async def get_insights_journey(projectId: int): return {"data": product_analytics.path_analysis(project_id=projectId, data=schemas.PathAnalysisSchema())} -# -# @app.post('/{projectId}/insights/journey', tags=["insights"]) -# async def get_insights_journey(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): -# return {"data": product_analytics.journey(project_id=projectId, data=data)} + +@app.post('/{projectId}/insights/journey', tags=["insights"]) +async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)): + return {"data": product_analytics.path_analysis(project_id=projectId, data=data)} # # # 
@app.post('/{projectId}/insights/users_acquisition', tags=["insights"]) diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index 646d893ff..c3a64c5d4 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -11,7 +11,7 @@ public_app, app, app_apikey = get_routers() @app.post('/{projectId}/dashboards', tags=["dashboard"]) -@app.put('/{projectId}/dashboards', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards', tags=["dashboard"]) def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data) @@ -30,7 +30,7 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont return {"data": data} -@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +# @app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -50,8 +50,8 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"]) -@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +# @app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) def add_card_to_dashboard(projectId: int, dashboardId: int, data: schemas.AddWidgetToDashboardPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -60,7 +60,7 @@ def add_card_to_dashboard(projectId: int, dashboardId: int, 
@app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -69,7 +69,7 @@ def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, data: schemas.UpdateWidgetPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -96,18 +96,18 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int @app.post('/{projectId}/cards/try', tags=["cards"]) -@app.post('/{projectId}/metrics/try', tags=["dashboard"]) -@app.put('/{projectId}/metrics/try', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/try', tags=["dashboard"]) +# @app.put('/{projectId}/metrics/try', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) def try_card(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)} @app.post('/{projectId}/cards/try/sessions', tags=["cards"]) -@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) 
-@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) @@ -115,48 +115,50 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(.. @app.post('/{projectId}/cards/try/issues', tags=["cards"]) -@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if len(data.series) == 0: return {"data": []} - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.startTimestamp = data.startTimestamp + data.series[0].filter.endTimestamp = data.endTimestamp data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter) return {"data": data} @app.get('/{projectId}/cards', tags=["cards"]) -@app.get('/{projectId}/metrics', tags=["dashboard"]) -@app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) +# @app.get('/{projectId}/metrics', tags=["dashboard"]) +# @app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} @app.post('/{projectId}/cards', tags=["cards"]) -@app.post('/{projectId}/metrics', 
tags=["dashboard"]) -@app.put('/{projectId}/metrics', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics', tags=["dashboard"]) +# @app.put('/{projectId}/metrics', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) def create_card(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data) + return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data) @app.post('/{projectId}/cards/search', tags=["cards"]) -@app.post('/{projectId}/metrics/search', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/search', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"]) def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)} @app.get('/{projectId}/cards/{metric_id}', tags=["cards"]) -@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): - if not isinstance(metric_id, int): + if metric_id.isnumeric(): + metric_id = int(metric_id) + else: return {"errors": ["invalid card_id"]} data = custom_metrics.get_card(project_id=projectId, 
user_id=context.user_id, metric_id=metric_id) if data is None: @@ -173,8 +175,8 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"]) -@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) def get_card_sessions(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -185,13 +187,15 @@ def get_card_sessions(projectId: int, metric_id: int, @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"]) -@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - if not isinstance(metric_id, int): - return {"errors": [f"invalid card_id: {metric_id}"]} + if metric_id.isnumeric(): + metric_id = int(metric_id) + else: + return {"errors": ["invalid card_id"]} data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) @@ -201,8 +205,8 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) 
-@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -214,22 +218,22 @@ def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) def get_custom_metric_errors_list(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id, - data=data) + data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, + metric_id=metric_id, data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"]) -@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) -def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...), +# @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) +def 
get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) @@ -237,25 +241,25 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -def update_custom_metric(projectId: int, metric_id: int, data: schemas.UpdateCardSchema = Body(...), +# @app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) -@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +# 
@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) def update_custom_metric_state(projectId: int, metric_id: int, - data: schemas.UpdateCustomMetricsStatusSchema = Body(...), + data: schemas.UpdateCardStatusSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return { "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id, @@ -263,8 +267,8 @@ def update_custom_metric_state(projectId: int, metric_id: int, @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) def delete_custom_metric(projectId: int, metric_id: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} + return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} diff --git a/api/schemas.py b/api/schemas.py deleted file mode 100644 index 991ef4c65..000000000 --- a/api/schemas.py +++ /dev/null @@ -1,1457 +0,0 @@ -from enum import Enum -from typing import Optional, List, Union, Literal, Any - -from pydantic import BaseModel, Field, EmailStr, HttpUrl, root_validator, validator -from pydantic.types import Json - -from chalicelib.utils.TimeUTC import TimeUTC -import re - - -def attribute_to_camel_case(snake_str): - components = snake_str.split("_") - return components[0] + ''.join(x.title() 
for x in components[1:]) - - -def transform_email(email: str) -> str: - return email.lower().strip() if isinstance(email, str) else email - - -def remove_whitespace(value: str) -> str: - return " ".join(value.split()) if isinstance(value, str) else value - - -class _Grecaptcha(BaseModel): - g_recaptcha_response: Optional[str] = Field(None, alias='g-recaptcha-response') - - -class UserLoginSchema(_Grecaptcha): - email: EmailStr = Field(...) - password: str = Field(...) - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - - -class UserSignupSchema(UserLoginSchema): - fullname: str = Field(...) - organizationName: str = Field(...) - - class Config: - alias_generator = attribute_to_camel_case - - -class EditAccountSchema(BaseModel): - name: Optional[str] = Field(None) - tenantName: Optional[str] = Field(None) - opt_out: Optional[bool] = Field(None) - - _transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace) - _transform_tenantName = validator('tenantName', pre=True, allow_reuse=True)(remove_whitespace) - - -class ForgetPasswordPayloadSchema(_Grecaptcha): - email: EmailStr = Field(...) - - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - - -class EditUserPasswordSchema(BaseModel): - old_password: str = Field(...) - new_password: str = Field(...) - - class Config: - alias_generator = attribute_to_camel_case - - -class UpdateTenantSchema(BaseModel): - name: Optional[str] = Field(None) - opt_out: Optional[bool] = Field(None) - tenant_name: Optional[str] = Field(None) - - class Config: - alias_generator = attribute_to_camel_case - - -class CreateProjectSchema(BaseModel): - name: str = Field(default="my first project") - _transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace) - - -class CurrentAPIContext(BaseModel): - tenant_id: int = Field(...) - - -class CurrentContext(CurrentAPIContext): - user_id: int = Field(...) - email: EmailStr = Field(...) 
- - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - - -class AddCollaborationSchema(BaseModel): - name: str = Field(...) - url: HttpUrl = Field(...) - _transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace) - _transform_url = validator('url', pre=True, allow_reuse=True)(remove_whitespace) - - -class EditCollaborationSchema(AddCollaborationSchema): - name: Optional[str] = Field(None) - - -class CreateNotificationSchema(BaseModel): - token: str = Field(...) - notifications: List = Field(...) - - -class _TimedSchema(BaseModel): - startTimestamp: int = Field(default=None) - endTimestamp: int = Field(default=None) - - @root_validator - def time_validator(cls, values): - if values.get("startTimestamp") is not None and values.get("endTimestamp") is not None: - assert values.get("startTimestamp") < values.get("endTimestamp"), \ - "endTimestamp must be greater than startTimestamp" - return values - - -class NotificationsViewSchema(_TimedSchema): - ids: Optional[List] = Field(default=[]) - startTimestamp: Optional[int] = Field(default=None) - endTimestamp: Optional[int] = Field(default=None) - - -class GithubSchema(BaseModel): - token: str = Field(...) - - -class JiraSchema(GithubSchema): - username: str = Field(...) - url: HttpUrl = Field(...) - - @validator('url') - def transform_url(cls, v: HttpUrl): - return HttpUrl.build(scheme=v.scheme.lower(), host=v.host.lower()) - - -class CreateEditWebhookSchema(BaseModel): - webhookId: Optional[int] = Field(None) - endpoint: str = Field(...) - authHeader: Optional[str] = Field(None) - name: Optional[str] = Field(...) - _transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace) - - -class CreateMemberSchema(BaseModel): - userId: Optional[int] = Field(None) - name: str = Field(...) - email: EmailStr = Field(...) 
- admin: bool = Field(False) - - _transform_email = validator('email', pre=True, allow_reuse=True)(transform_email) - _transform_name = validator('name', pre=True, allow_reuse=True)(remove_whitespace) - - -class EditMemberSchema(BaseModel): - name: str = Field(...) - email: EmailStr = Field(...) - admin: bool = Field(False) - - -class EditPasswordByInvitationSchema(BaseModel): - invitation: str = Field(...) - passphrase: str = Field(..., alias="pass") - password: str = Field(...) - - -class AssignmentSchema(BaseModel): - assignee: str = Field(...) - description: str = Field(...) - title: str = Field(...) - issue_type: str = Field(...) - _transform_title = validator('title', pre=True, allow_reuse=True)(remove_whitespace) - - class Config: - alias_generator = attribute_to_camel_case - - -class CommentAssignmentSchema(BaseModel): - message: str = Field(...) - - -class IntegrationNotificationSchema(BaseModel): - comment: Optional[str] = Field(None) - - -class GdprSchema(BaseModel): - maskEmails: bool = Field(...) - sampleRate: int = Field(...) - maskNumbers: bool = Field(...) - defaultInputMode: str = Field(...) - - -class SampleRateSchema(BaseModel): - rate: int = Field(...) - captureAll: bool = Field(False) - - -class WeeklyReportConfigSchema(BaseModel): - weekly_report: bool = Field(True) - - class Config: - alias_generator = attribute_to_camel_case - - -class DatadogSchema(BaseModel): - apiKey: str = Field(...) - applicationKey: str = Field(...) - - -class StackdriverSchema(BaseModel): - serviceAccountCredentials: str = Field(...) - logName: str = Field(...) - - -class NewrelicSchema(BaseModel): - applicationId: str = Field(...) - xQueryKey: str = Field(...) - region: str = Field(...) - - -class RollbarSchema(BaseModel): - accessToken: str = Field(...) - - -class BugsnagBasicSchema(BaseModel): - authorizationToken: str = Field(...) - - -class BugsnagSchema(BugsnagBasicSchema): - bugsnagProjectId: str = Field(...) 
- - -class CloudwatchBasicSchema(BaseModel): - awsAccessKeyId: str = Field(...) - awsSecretAccessKey: str = Field(...) - region: str = Field(...) - - -class CloudwatchSchema(CloudwatchBasicSchema): - logGroupName: str = Field(...) - - -class ElasticsearchBasicSchema(BaseModel): - host: str = Field(...) - port: int = Field(...) - apiKeyId: str = Field(...) - apiKey: str = Field(...) - - -class ElasticsearchSchema(ElasticsearchBasicSchema): - indexes: str = Field(...) - - -class SumologicSchema(BaseModel): - accessId: str = Field(...) - accessKey: str = Field(...) - region: str = Field(...) - - -class MetadataBasicSchema(BaseModel): - index: Optional[int] = Field(None) - key: str = Field(...) - _transform_key = validator('key', pre=True, allow_reuse=True)(remove_whitespace) - - -class MetadataListSchema(BaseModel): - list: List[MetadataBasicSchema] = Field(...) - - -class _AlertMessageSchema(BaseModel): - type: str = Field(...) - value: str = Field(...) - - -class AlertDetectionType(str, Enum): - percent = "percent" - change = "change" - - -class _AlertOptionSchema(BaseModel): - message: List[_AlertMessageSchema] = Field([]) - currentPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(...) 
- previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(15) - lastNotification: Optional[int] = Field(None) - renotifyInterval: Optional[int] = Field(720) - - -class AlertColumn(str, Enum): - performance__dom_content_loaded__average = "performance.dom_content_loaded.average" - performance__first_meaningful_paint__average = "performance.first_meaningful_paint.average" - performance__page_load_time__average = "performance.page_load_time.average" - performance__dom_build_time__average = "performance.dom_build_time.average" - performance__speed_index__average = "performance.speed_index.average" - performance__page_response_time__average = "performance.page_response_time.average" - performance__ttfb__average = "performance.ttfb.average" - performance__time_to_render__average = "performance.time_to_render.average" - performance__image_load_time__average = "performance.image_load_time.average" - performance__request_load_time__average = "performance.request_load_time.average" - resources__load_time__average = "resources.load_time.average" - resources__missing__count = "resources.missing.count" - errors__4xx_5xx__count = "errors.4xx_5xx.count" - errors__4xx__count = "errors.4xx.count" - errors__5xx__count = "errors.5xx.count" - errors__javascript__impacted_sessions__count = "errors.javascript.impacted_sessions.count" - performance__crashes__count = "performance.crashes.count" - errors__javascript__count = "errors.javascript.count" - errors__backend__count = "errors.backend.count" - custom = "CUSTOM" - - -class MathOperator(str, Enum): - _equal = "=" - _less = "<" - _greater = ">" - _less_eq = "<=" - _greater_eq = ">=" - - -class _AlertQuerySchema(BaseModel): - left: Union[AlertColumn, int] = Field(...) - right: float = Field(...) - operator: MathOperator = Field(...) - - -class AlertDetectionMethod(str, Enum): - threshold = "threshold" - change = "change" - - -class AlertSchema(BaseModel): - name: str = Field(...) - detection_method: AlertDetectionMethod = Field(...) 
- change: Optional[AlertDetectionType] = Field(default=AlertDetectionType.change) - description: Optional[str] = Field(None) - options: _AlertOptionSchema = Field(...) - query: _AlertQuerySchema = Field(...) - series_id: Optional[int] = Field(None) - - @root_validator(pre=True) - def transform_alert(cls, values): - values["seriesId"] = None - if isinstance(values["query"]["left"], int): - values["seriesId"] = values["query"]["left"] - values["query"]["left"] = AlertColumn.custom - - return values - - @root_validator - def alert_validator(cls, values): - if values.get("query") is not None and values["query"].left == AlertColumn.custom: - assert values.get("series_id") is not None, "series_id should not be null for CUSTOM alert" - return values - - class Config: - alias_generator = attribute_to_camel_case - - -class SourcemapUploadPayloadSchema(BaseModel): - urls: List[str] = Field(..., alias="URL") - - -class ErrorSource(str, Enum): - js_exception = "js_exception" - bugsnag = "bugsnag" - cloudwatch = "cloudwatch" - datadog = "datadog" - newrelic = "newrelic" - rollbar = "rollbar" - sentry = "sentry" - stackdriver = "stackdriver" - sumologic = "sumologic" - - -class EventType(str, Enum): - click = "click" - input = "input" - location = "location" - custom = "custom" - request = "request" - request_details = "fetch" - graphql = "graphql" - state_action = "stateAction" - error = "error" - click_ios = "clickIos" - input_ios = "inputIos" - view_ios = "viewIos" - custom_ios = "customIos" - request_ios = "requestIos" - error_ios = "errorIos" - - -class PerformanceEventType(str, Enum): - location_dom_complete = "domComplete" - location_largest_contentful_paint_time = "largestContentfulPaintTime" - time_between_events = "timeBetweenEvents" - location_ttfb = "ttfb" - location_avg_cpu_load = "avgCpuLoad" - location_avg_memory_usage = "avgMemoryUsage" - fetch_failed = "fetchFailed" - # fetch_duration = "FETCH_DURATION" - - -class FilterType(str, Enum): - user_os = "userOs" - 
user_browser = "userBrowser" - user_device = "userDevice" - user_country = "userCountry" - user_city = "userCity" - user_state = "userState" - user_id = "userId" - user_anonymous_id = "userAnonymousId" - referrer = "referrer" - rev_id = "revId" - # IOS - user_os_ios = "userOsIos" - user_device_ios = "userDeviceIos" - user_country_ios = "userCountryIos" - user_id_ios = "userIdIos" - user_anonymous_id_ios = "userAnonymousIdIos" - rev_id_ios = "revIdIos" - # - duration = "duration" - platform = "platform" - metadata = "metadata" - issue = "issue" - events_count = "eventsCount" - utm_source = "utmSource" - utm_medium = "utmMedium" - utm_campaign = "utmCampaign" - - -class SearchEventOperator(str, Enum): - _is = "is" - _is_any = "isAny" - _on = "on" - _on_any = "onAny" - _is_not = "isNot" - _is_undefined = "isUndefined" - _not_on = "notOn" - _contains = "contains" - _not_contains = "notContains" - _starts_with = "startsWith" - _ends_with = "endsWith" - - -class ClickEventExtraOperator(str, Enum): - _on_selector = "onSelector" - _on_text = "onText" - - -class IssueFilterOperator(str, Enum): - _on_selector = ClickEventExtraOperator._on_selector.value - - -class PlatformType(str, Enum): - mobile = "mobile" - desktop = "desktop" - tablet = "tablet" - - -class SearchEventOrder(str, Enum): - _then = "then" - _or = "or" - _and = "and" - - -class IssueType(str, Enum): - click_rage = 'click_rage' - dead_click = 'dead_click' - excessive_scrolling = 'excessive_scrolling' - bad_request = 'bad_request' - missing_resource = 'missing_resource' - memory = 'memory' - cpu = 'cpu' - slow_resource = 'slow_resource' - slow_page_load = 'slow_page_load' - crash = 'crash' - custom = 'custom' - js_exception = 'js_exception' - mouse_thrashing = 'mouse_thrashing' - - -class MetricFormatType(str, Enum): - session_count = 'sessionCount' - - -class __MixedSearchFilter(BaseModel): - is_event: bool = Field(...) 
- - @root_validator(pre=True) - def remove_duplicate_values(cls, values): - if values.get("value") is not None: - if len(values["value"]) > 0 \ - and (isinstance(values["value"][0], int) or isinstance(values["value"][0], dict)): - return values - values["value"] = list(set(values["value"])) - return values - - class Config: - alias_generator = attribute_to_camel_case - - -class HttpMethod(str, Enum): - _get = 'GET' - _head = 'HEAD' - _post = 'POST' - _put = 'PUT' - _delete = 'DELETE' - _connect = 'CONNECT' - _option = 'OPTIONS' - _trace = 'TRACE' - _patch = 'PATCH' - - -class FetchFilterType(str, Enum): - _url = "fetchUrl" # FETCH_URL - _status_code = "fetchStatusCode" # FETCH_STATUS_CODE - _method = "fetchMethod" # FETCH_METHOD - _duration = "fetchDuration" # FETCH_DURATION - _request_body = "fetchRequestBody" # FETCH_REQUEST_BODY - _response_body = "fetchResponseBody" # FETCH_RESPONSE_BODY - - -class GraphqlFilterType(str, Enum): - _name = "graphqlName" # GRAPHQL_NAME - _method = "graphqlMethod" # GRAPHQL_METHOD - _request_body = "graphqlRequestBody" # GRAPHQL_REQUEST_BODY - _response_body = "graphqlResponseBody" # GRAPHQL_RESPONSE_BODY - - -class IssueFilterType(str, Enum): - _selector = "CLICK_SELECTOR" - - -class RequestGraphqlFilterSchema(BaseModel): - type: Union[FetchFilterType, GraphqlFilterType] = Field(...) - value: List[Union[int, str]] = Field(...) - operator: Union[SearchEventOperator, MathOperator] = Field(...) - - -class IssueFilterSchema(BaseModel): - type: IssueFilterType = Field(...) - value: List[str] = Field(...) - operator: IssueFilterOperator = Field(...) - - -class _SessionSearchEventRaw(__MixedSearchFilter): - is_event: bool = Field(default=True, const=True) - value: List[str] = Field(...) - type: Union[EventType, PerformanceEventType] = Field(...) - operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...) 
- source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None) - sourceOperator: Optional[MathOperator] = Field(default=None) - filters: Optional[List[Union[RequestGraphqlFilterSchema, IssueFilterSchema]]] = Field(default=None) - - @root_validator(pre=True) - def transform(cls, values): - if values.get("type") is None: - return values - values["type"] = { - "CLICK": EventType.click.value, - "INPUT": EventType.input.value, - "LOCATION": EventType.location.value, - "CUSTOM": EventType.custom.value, - "REQUEST": EventType.request.value, - "FETCH": EventType.request_details.value, - "GRAPHQL": EventType.graphql.value, - "STATEACTION": EventType.state_action.value, - "ERROR": EventType.error.value, - "CLICK_IOS": EventType.click_ios.value, - "INPUT_IOS": EventType.input_ios.value, - "VIEW_IOS": EventType.view_ios.value, - "CUSTOM_IOS": EventType.custom_ios.value, - "REQUEST_IOS": EventType.request_ios.value, - "ERROR_IOS": EventType.error_ios.value, - "DOM_COMPLETE": PerformanceEventType.location_dom_complete.value, - "LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.location_largest_contentful_paint_time.value, - "TIME_BETWEEN_EVENTS": PerformanceEventType.time_between_events.value, - "TTFB": PerformanceEventType.location_ttfb.value, - "AVG_CPU_LOAD": PerformanceEventType.location_avg_cpu_load.value, - "AVG_MEMORY_USAGE": PerformanceEventType.location_avg_memory_usage.value, - "FETCH_FAILED": PerformanceEventType.fetch_failed.value, - }.get(values["type"], values["type"]) - return values - - @root_validator - def event_validator(cls, values): - if isinstance(values.get("type"), PerformanceEventType): - if values.get("type") == PerformanceEventType.fetch_failed: - return values - # assert values.get("source") is not None, "source should not be null for PerformanceEventType" - # assert isinstance(values["source"], list) and len(values["source"]) > 0, \ - # "source should not be empty for PerformanceEventType" - assert values.get("sourceOperator") is not 
None, \ - "sourceOperator should not be null for PerformanceEventType" - if values["type"] == PerformanceEventType.time_between_events: - assert values["sourceOperator"] != MathOperator._equal, \ - f"{MathOperator._equal} is not allowed for duration of {PerformanceEventType.time_between_events}" - assert len(values.get("value", [])) == 2, \ - f"must provide 2 Events as value for {PerformanceEventType.time_between_events}" - assert isinstance(values["value"][0], _SessionSearchEventRaw) \ - and isinstance(values["value"][1], _SessionSearchEventRaw), \ - f"event should be of type _SessionSearchEventRaw for {PerformanceEventType.time_between_events}" - assert len(values["source"]) > 0 and isinstance(values["source"][0], int), \ - f"source of type int is required for {PerformanceEventType.time_between_events}" - else: - assert "source" in values, f"source is required for {values.get('type')}" - assert isinstance(values["source"], list), f"source of type list is required for {values.get('type')}" - for c in values["source"]: - assert isinstance(c, int), f"source value should be of type int for {values.get('type')}" - elif values.get("type") == EventType.error and values.get("source") is None: - values["source"] = [ErrorSource.js_exception] - elif values.get("type") == EventType.request_details: - assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \ - f"filters should be defined for {EventType.request_details}" - elif values.get("type") == EventType.graphql: - assert isinstance(values.get("filters"), List) and len(values.get("filters", [])) > 0, \ - f"filters should be defined for {EventType.graphql}" - - if isinstance(values.get("operator"), ClickEventExtraOperator): - assert values.get("type") == EventType.click, \ - f"operator:{values['operator']} is only available for event-type: {EventType.click}" - return values - - -class _SessionSearchEventSchema(_SessionSearchEventRaw): - value: Union[List[Union[_SessionSearchEventRaw, str]], 
str] = Field(...) - - -def transform_old_FilterType(cls, values): - if values.get("type") is None: - return values - values["type"] = { - "USEROS": FilterType.user_os.value, - "USERBROWSER": FilterType.user_browser.value, - "USERDEVICE": FilterType.user_device.value, - "USERCOUNTRY": FilterType.user_country.value, - "USERID": FilterType.user_id.value, - "USERANONYMOUSID": FilterType.user_anonymous_id.value, - "REFERRER": FilterType.referrer.value, - "REVID": FilterType.rev_id.value, - "USEROS_IOS": FilterType.user_os_ios.value, - "USERDEVICE_IOS": FilterType.user_device_ios.value, - "USERCOUNTRY_IOS": FilterType.user_country_ios.value, - "USERID_IOS": FilterType.user_id_ios.value, - "USERANONYMOUSID_IOS": FilterType.user_anonymous_id_ios.value, - "REVID_IOS": FilterType.rev_id_ios.value, - "DURATION": FilterType.duration.value, - "PLATFORM": FilterType.platform.value, - "METADATA": FilterType.metadata.value, - "ISSUE": FilterType.issue.value, - "EVENTS_COUNT": FilterType.events_count.value, - "UTM_SOURCE": FilterType.utm_source.value, - "UTM_MEDIUM": FilterType.utm_medium.value, - "UTM_CAMPAIGN": FilterType.utm_campaign.value - }.get(values["type"], values["type"]) - return values - - -class SessionSearchFilterSchema(__MixedSearchFilter): - is_event: bool = Field(False, const=False) - # TODO: remove this if there nothing broken from the UI - # value: Union[Optional[Union[IssueType, PlatformType, int, str]], - # Optional[List[Union[IssueType, PlatformType, int, str]]]] = Field(...) - value: List[Union[IssueType, PlatformType, int, str]] = Field(default=[]) - type: FilterType = Field(...) - operator: Union[SearchEventOperator, MathOperator] = Field(...) 
- source: Optional[Union[ErrorSource, str]] = Field(default=None) - filters: List[IssueFilterSchema] = Field(default=[]) - - transform = root_validator(pre=True, allow_reuse=True)(transform_old_FilterType) - - @root_validator - def filter_validator(cls, values): - if values.get("type") == FilterType.metadata: - assert values.get("source") is not None and len(values["source"]) > 0, \ - "must specify a valid 'source' for metadata filter" - elif values.get("type") == FilterType.issue: - for v in values.get("value"): - assert isinstance(v, IssueType), f"value should be of type IssueType for {values.get('type')} filter" - elif values.get("type") == FilterType.platform: - for v in values.get("value"): - assert isinstance(v, PlatformType), \ - f"value should be of type PlatformType for {values.get('type')} filter" - elif values.get("type") == FilterType.events_count: - assert isinstance(values.get("operator"), MathOperator), \ - f"operator should be of type MathOperator for {values.get('type')} filter" - for v in values.get("value"): - assert isinstance(v, int), f"value should be of type int for {values.get('type')} filter" - else: - assert isinstance(values.get("operator"), SearchEventOperator), \ - f"operator should be of type SearchEventOperator for {values.get('type')} filter" - return values - - -class _PaginatedSchema(BaseModel): - limit: int = Field(default=200, gt=0, le=200) - page: int = Field(default=1, gt=0) - - -class SortOrderType(str, Enum): - asc = "ASC" - desc = "DESC" - - -class SessionsSearchPayloadSchema(_PaginatedSchema): - events: List[_SessionSearchEventSchema] = Field([]) - filters: List[SessionSearchFilterSchema] = Field([]) - startDate: int = Field(None) - endDate: int = Field(None) - sort: str = Field(default="startTs") - order: SortOrderType = Field(default=SortOrderType.desc) - events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) - group_by_user: bool = Field(default=False) - bookmarked: bool = Field(default=False) - 
- @root_validator(pre=True) - def transform_order(cls, values): - if values.get("sort") is None: - values["sort"] = "startTs" - - if values.get("order") is None: - values["order"] = SortOrderType.desc - else: - values["order"] = values["order"].upper() - return values - - class Config: - alias_generator = attribute_to_camel_case - - -class FlatSessionsSearch(BaseModel): - events: Optional[List[_SessionSearchEventSchema]] = Field([]) - filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([]) - - @root_validator(pre=True) - def flat_to_original(cls, values): - # in case the old search body was passed - if len(values.get("events", [])) > 0: - for v in values["events"]: - v["isEvent"] = True - for v in values.get("filters", []): - v["isEvent"] = False - else: - n_filters = [] - n_events = [] - for v in values.get("filters", []): - if v.get("isEvent"): - n_events.append(v) - else: - v["isEvent"] = False - n_filters.append(v) - values["events"] = n_events - values["filters"] = n_filters - return values - - -class FlatSessionsSearchPayloadSchema(FlatSessionsSearch, SessionsSearchPayloadSchema): - pass - - -class SessionsSearchCountSchema(FlatSessionsSearchPayloadSchema): - # class SessionsSearchCountSchema(SessionsSearchPayloadSchema): - sort: Optional[str] = Field(default=None) - order: Optional[str] = Field(default=None) - - -class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema): - # class FunnelSearchPayloadSchema(SessionsSearchPayloadSchema): - range_value: Optional[str] = Field(None) - sort: Optional[str] = Field(None) - order: Optional[str] = Field(None) - events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) - group_by_user: Optional[bool] = Field(default=False, const=True) - rangeValue: Optional[str] = Field(None) - - @root_validator(pre=True) - def enforce_default_values(cls, values): - values["eventsOrder"] = SearchEventOrder._then - values["groupByUser"] = False - return values - - 
class Config: - alias_generator = attribute_to_camel_case - - -class FunnelSchema(BaseModel): - name: str = Field(...) - filter: FunnelSearchPayloadSchema = Field([]) - is_public: bool = Field(default=False) - - class Config: - alias_generator = attribute_to_camel_case - - -class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): - # class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema): - sort: Optional[str] = Field(None) - order: Optional[str] = Field(None) - events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) - group_by_user: Optional[bool] = Field(default=False, const=True) - rangeValue: Optional[str] = Field(None) - - -class ErrorStatus(str, Enum): - all = 'all' - unresolved = 'unresolved' - resolved = 'resolved' - ignored = 'ignored' - - -class ErrorSort(str, Enum): - occurrence = 'occurrence' - users_count = 'users' - sessions_count = 'sessions' - - -class SearchErrorsSchema(FlatSessionsSearchPayloadSchema): - sort: ErrorSort = Field(default=ErrorSort.occurrence) - density: Optional[int] = Field(7) - status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) - query: Optional[str] = Field(default=None) - - -class ProductAnalyticsFilterType(str, Enum): - event_type = 'eventType' - start_point = 'startPoint' - user_id = FilterType.user_id.value - - -class ProductAnalyticsEventType(str, Enum): - click = EventType.click.value - input = EventType.input.value - location = EventType.location.value - custom_event = EventType.custom.value - - -class ProductAnalyticsFilter(BaseModel): - type: ProductAnalyticsFilterType = Field(...) - operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...) - value: List[Union[ProductAnalyticsEventType | str]] = Field(...) 
- - @root_validator - def validator(cls, values): - if values.get("type") == ProductAnalyticsFilterType.event_type: - assert values.get("value") is not None and len(values["value"]) > 0, \ - f"value must be provided for type:{ProductAnalyticsFilterType.event_type}" - assert isinstance(values["value"][0], ProductAnalyticsEventType), \ - f"value must be of type {ProductAnalyticsEventType} for type:{ProductAnalyticsFilterType.event_type}" - - return values - - -class PathAnalysisSchema(_TimedSchema): - startTimestamp: int = Field(TimeUTC.now(delta_days=-1)) - endTimestamp: int = Field(TimeUTC.now()) - density: int = Field(7) - filters: List[ProductAnalyticsFilter] = Field(default=[]) - type: Optional[str] = Field(default=None) - - class Config: - alias_generator = attribute_to_camel_case - - -class AssistSearchPayloadSchema(BaseModel): - filters: List[dict] = Field([]) - - -class SentrySchema(BaseModel): - projectSlug: str = Field(...) - organizationSlug: str = Field(...) - token: str = Field(...) - - -class MobileSignPayloadSchema(BaseModel): - keys: List[str] = Field(...) 
- - -class CardSeriesFilterSchema(SearchErrorsSchema): - startDate: Optional[int] = Field(default=None) - endDate: Optional[int] = Field(default=None) - sort: Optional[str] = Field(default=None) - order: SortOrderType = Field(default=SortOrderType.desc) - group_by_user: Optional[bool] = Field(default=False, const=True) - - -class CardSeriesSchema(BaseModel): - series_id: Optional[int] = Field(None) - name: Optional[str] = Field(None) - index: Optional[int] = Field(None) - filter: Optional[Union[CardSeriesFilterSchema | PathAnalysisSchema]] = Field(default=None) - - class Config: - alias_generator = attribute_to_camel_case - - -class MetricTimeseriesViewType(str, Enum): - line_chart = "lineChart" - progress = "progress" - area_chart = "areaChart" - - -class MetricTableViewType(str, Enum): - table = "table" - pie_chart = "pieChart" - - -class MetricOtherViewType(str, Enum): - other_chart = "chart" - list_chart = "list" - - -class MetricType(str, Enum): - timeseries = "timeseries" - table = "table" - funnel = "funnel" - errors = "errors" - performance = "performance" - resources = "resources" - web_vital = "webVitals" - pathAnalysis = "pathAnalysis" - retention = "retention" - stickiness = "stickiness" - click_map = "clickMap" - insights = "insights" - - -class MetricOfErrors(str, Enum): - calls_errors = "callsErrors" # calls_errors - domains_errors_4xx = "domainsErrors4xx" # domains_errors_4xx - domains_errors_5xx = "domainsErrors5xx" # domains_errors_5xx - errors_per_domains = "errorsPerDomains" # errors_per_domains - errors_per_type = "errorsPerType" # errors_per_type - impacted_sessions_by_js_errors = "impactedSessionsByJsErrors" # impacted_sessions_by_js_errors - resources_by_party = "resourcesByParty" # resources_by_party - - -class MetricOfPerformance(str, Enum): - cpu = "cpu" # cpu - crashes = "crashes" # crashes - fps = "fps" # fps - impacted_sessions_by_slow_pages = "impactedSessionsBySlowPages" # impacted_sessions_by_slow_pages - memory_consumption = 
"memoryConsumption" # memory_consumption - pages_dom_buildtime = "pagesDomBuildtime" # pages_dom_buildtime - pages_response_time = "pagesResponseTime" # pages_response_time - pages_response_time_distribution = "pagesResponseTimeDistribution" # pages_response_time_distribution - resources_vs_visually_complete = "resourcesVsVisuallyComplete" # resources_vs_visually_complete - sessions_per_browser = "sessionsPerBrowser" # sessions_per_browser - slowest_domains = "slowestDomains" # slowest_domains - speed_location = "speedLocation" # speed_location - time_to_render = "timeToRender" # time_to_render - - -class MetricOfResources(str, Enum): - missing_resources = "missingResources" # missing_resources - resources_count_by_type = "resourcesCountByType" # resources_count_by_type - resources_loading_time = "resourcesLoadingTime" # resources_loading_time - resource_type_vs_response_end = "resourceTypeVsResponseEnd" # resource_type_vs_response_end - slowest_resources = "slowestResources" # slowest_resources - - -class MetricOfWebVitals(str, Enum): - avg_cpu = "avgCpu" # avg_cpu - avg_dom_content_loaded = "avgDomContentLoaded" # avg_dom_content_loaded - avg_dom_content_load_start = "avgDomContentLoadStart" # avg_dom_content_load_start - avg_first_contentful_pixel = "avgFirstContentfulPixel" # avg_first_contentful_pixel - avg_first_paint = "avgFirstPaint" # avg_first_paint - avg_fps = "avgFps" # avg_fps - avg_image_load_time = "avgImageLoadTime" # avg_image_load_time - avg_page_load_time = "avgPageLoadTime" # avg_page_load_time - avg_pages_dom_buildtime = "avgPagesDomBuildtime" # avg_pages_dom_buildtime - avg_pages_response_time = "avgPagesResponseTime" # avg_pages_response_time - avg_request_load_time = "avgRequestLoadTime" # avg_request_load_time - avg_response_time = "avgResponseTime" # avg_response_time - avg_session_duration = "avgSessionDuration" # avg_session_duration - avg_till_first_byte = "avgTillFirstByte" # avg_till_first_byte - avg_time_to_interactive = 
"avgTimeToInteractive" # avg_time_to_interactive - avg_time_to_render = "avgTimeToRender" # avg_time_to_render - avg_used_js_heap_size = "avgUsedJsHeapSize" # avg_used_js_heap_size - avg_visited_pages = "avgVisitedPages" # avg_visited_pages - count_requests = "countRequests" # count_requests - count_sessions = "countSessions" # count_sessions - - -class MetricOfTable(str, Enum): - user_os = FilterType.user_os.value - user_browser = FilterType.user_browser.value - user_device = FilterType.user_device.value - user_country = FilterType.user_country.value - user_city = FilterType.user_city.value - user_state = FilterType.user_state.value - user_id = FilterType.user_id.value - issues = FilterType.issue.value - visited_url = "location" - sessions = "sessions" - errors = "jsException" - - -class MetricOfTimeseries(str, Enum): - session_count = "sessionCount" - - -class MetricOfClickMap(str, Enum): - click_map_url = "clickMapUrl" - - -class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema, _TimedSchema): - startTimestamp: int = Field(TimeUTC.now(-7)) - endTimestamp: int = Field(TimeUTC.now()) - series: List[CardSeriesSchema] = Field(default=[]) - - class Config: - alias_generator = attribute_to_camel_case - - -class CardChartSchema(CardSessionsSchema): - density: int = Field(7) - - -class CardConfigSchema(BaseModel): - col: Optional[int] = Field(...) - row: Optional[int] = Field(default=2) - position: Optional[int] = Field(default=0) - - -class __CardSchema(BaseModel): - name: Optional[str] = Field(...) - is_public: bool = Field(default=True) - default_config: CardConfigSchema = Field(..., alias="config") - thumbnail: Optional[str] = Field(default=None) - metric_format: Optional[MetricFormatType] = Field(default=None) - - class Config: - alias_generator = attribute_to_camel_case - - -class CardSchema(__CardSchema, CardChartSchema): - view_type: Union[MetricTimeseriesViewType, \ - MetricTableViewType, MetricOtherViewType] = Field(...) 
- metric_type: MetricType = Field(...) - metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \ - MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \ - MetricOfClickMap] = Field(default=MetricOfTable.user_id) - metric_value: List[IssueType] = Field(default=[]) - is_template: bool = Field(default=False) - - # This is used to handle wrong values sent by the UI - @root_validator(pre=True) - def transform(cls, values): - values["isTemplate"] = values.get("metricType") in [MetricType.errors, MetricType.performance, - MetricType.resources, MetricType.web_vital] - if values.get("metricType") == MetricType.timeseries \ - or values.get("metricType") == MetricType.table \ - and values.get("metricOf") != MetricOfTable.issues: - values["metricValue"] = [] - - if values.get("metricType") in [MetricType.funnel, MetricType.pathAnalysis] and \ - values.get("series") is not None and len(values["series"]) > 0: - values["series"] = [values["series"][0]] - elif values.get("metricType") not in [MetricType.table, - MetricType.timeseries, - MetricType.insights, - MetricType.click_map, - MetricType.funnel, - MetricType.pathAnalysis] \ - and values.get("series") is not None and len(values["series"]) > 0: - values["series"] = [] - - return values - - @root_validator - def restrictions(cls, values): - assert values.get("metric_type") != MetricType.insights, f"metricType:{MetricType.insights} not supported yet" - return values - - @root_validator - def validator(cls, values): - if values.get("metric_type") == MetricType.timeseries: - assert isinstance(values.get("view_type"), MetricTimeseriesViewType), \ - f"viewType must be of type {MetricTimeseriesViewType} for metricType:{MetricType.timeseries}" - assert isinstance(values.get("metric_of"), MetricOfTimeseries), \ - f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.timeseries}" - elif values.get("metric_type") == MetricType.table: - assert isinstance(values.get("view_type"), 
MetricTableViewType), \ - f"viewType must be of type {MetricTableViewType} for metricType:{MetricType.table}" - assert isinstance(values.get("metric_of"), MetricOfTable), \ - f"metricOf must be of type {MetricOfTable} for metricType:{MetricType.table}" - if values.get("metric_of") in (MetricOfTable.sessions, MetricOfTable.errors): - assert values.get("view_type") == MetricTableViewType.table, \ - f"viewType must be '{MetricTableViewType.table}' for metricOf:{values['metric_of']}" - if values.get("metric_of") != MetricOfTable.issues: - assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \ - f"metricValue is only available for metricOf:{MetricOfTable.issues}" - elif values.get("metric_type") == MetricType.funnel: - pass - # allow UI sot send empty series for funnel - # assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}" - # ignore this for now, let the UI send whatever he wants for metric_of - # assert isinstance(values.get("metric_of"), MetricOfTimeseries), \ - # f"metricOf must be of type {MetricOfTimeseries} for metricType:{MetricType.funnel}" - elif values.get("metric_type") == MetricType.pathAnalysis: - pass - else: - if values.get("metric_type") == MetricType.errors: - assert isinstance(values.get("metric_of"), MetricOfErrors), \ - f"metricOf must be of type {MetricOfErrors} for metricType:{MetricType.errors}" - elif values.get("metric_type") == MetricType.performance: - assert isinstance(values.get("metric_of"), MetricOfPerformance), \ - f"metricOf must be of type {MetricOfPerformance} for metricType:{MetricType.performance}" - elif values.get("metric_type") == MetricType.resources: - assert isinstance(values.get("metric_of"), MetricOfResources), \ - f"metricOf must be of type {MetricOfResources} for metricType:{MetricType.resources}" - elif values.get("metric_type") == MetricType.web_vital: - assert isinstance(values.get("metric_of"), MetricOfWebVitals), \ - f"metricOf must be of type 
{MetricOfWebVitals} for metricType:{MetricType.web_vital}" - elif values.get("metric_type") == MetricType.click_map: - assert isinstance(values.get("metric_of"), MetricOfClickMap), \ - f"metricOf must be of type {MetricOfClickMap} for metricType:{MetricType.click_map}" - # Allow only LOCATION events for clickMap - for s in values.get("series", []): - for f in s.filter.events: - assert f.type == EventType.location, f"only events of type:{EventType.location} are allowed for metricOf:{MetricType.click_map}" - - assert isinstance(values.get("view_type"), MetricOtherViewType), \ - f"viewType must be 'chart|list' for metricOf:{values.get('metric_of')}" - - return values - - class Config: - alias_generator = attribute_to_camel_case - - -class CardUpdateSeriesSchema(CardSeriesSchema): - series_id: Optional[int] = Field(None) - - class Config: - alias_generator = attribute_to_camel_case - - -class UpdateCardSchema(CardSchema): - series: List[CardUpdateSeriesSchema] = Field(...) - - -class UpdateCustomMetricsStatusSchema(BaseModel): - active: bool = Field(...) - - -class SavedSearchSchema(FunnelSchema): - filter: FlatSessionsSearchPayloadSchema = Field([]) - - -class CreateDashboardSchema(BaseModel): - name: str = Field(..., min_length=1) - description: Optional[str] = Field(default='') - is_public: bool = Field(default=False) - is_pinned: bool = Field(default=False) - metrics: Optional[List[int]] = Field(default=[]) - - class Config: - alias_generator = attribute_to_camel_case - - -class EditDashboardSchema(CreateDashboardSchema): - is_public: Optional[bool] = Field(default=None) - is_pinned: Optional[bool] = Field(default=None) - - -class UpdateWidgetPayloadSchema(BaseModel): - config: dict = Field(default={}) - - class Config: - alias_generator = attribute_to_camel_case - - -class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema): - metric_id: int = Field(...) 
- - class Config: - alias_generator = attribute_to_camel_case - - -class TemplatePredefinedUnits(str, Enum): - millisecond = "ms" - second = "s" - minute = "min" - memory = "mb" - frame = "f/s" - percentage = "%" - count = "count" - - -class LiveFilterType(str, Enum): - user_os = FilterType.user_os.value - user_browser = FilterType.user_browser.value - user_device = FilterType.user_device.value - user_country = FilterType.user_country.value - user_id = FilterType.user_id.value - user_anonymous_id = FilterType.user_anonymous_id.value - rev_id = FilterType.rev_id.value - platform = FilterType.platform.value - page_title = "pageTitle" - session_id = "sessionId" - metadata = FilterType.metadata.value - user_UUID = "userUuid" - tracker_version = "trackerVersion" - user_browser_version = "userBrowserVersion" - user_device_type = "userDeviceType" - - -class LiveSessionSearchFilterSchema(BaseModel): - value: Union[List[str], str] = Field(...) - type: LiveFilterType = Field(...) - source: Optional[str] = Field(default=None) - operator: Literal[SearchEventOperator._is, \ - SearchEventOperator._contains] = Field(default=SearchEventOperator._contains) - - transform = root_validator(pre=True, allow_reuse=True)(transform_old_FilterType) - - @root_validator - def validator(cls, values): - if values.get("type") is not None and values["type"] == LiveFilterType.metadata: - assert values.get("source") is not None, "source should not be null for METADATA type" - assert len(values.get("source")) > 0, "source should not be empty for METADATA type" - return values - - -class LiveSessionsSearchPayloadSchema(_PaginatedSchema): - filters: List[LiveSessionSearchFilterSchema] = Field([]) - sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP") - order: SortOrderType = Field(default=SortOrderType.desc) - - @root_validator(pre=True) - def transform(cls, values): - if values.get("order") is not None: - values["order"] = values["order"].upper() - if values.get("filters") is not None: - i = 
0 - while i < len(values["filters"]): - if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0: - del values["filters"][i] - else: - i += 1 - for i in values["filters"]: - if i.get("type") == LiveFilterType.platform: - i["type"] = LiveFilterType.user_device_type - if values.get("sort") is not None: - if values["sort"].lower() == "startts": - values["sort"] = "TIMESTAMP" - return values - - class Config: - alias_generator = attribute_to_camel_case - - -class IntegrationType(str, Enum): - github = "GITHUB" - jira = "JIRA" - slack = "SLACK" - ms_teams = "MSTEAMS" - sentry = "SENTRY" - bugsnag = "BUGSNAG" - rollbar = "ROLLBAR" - elasticsearch = "ELASTICSEARCH" - datadog = "DATADOG" - sumologic = "SUMOLOGIC" - stackdriver = "STACKDRIVER" - cloudwatch = "CLOUDWATCH" - newrelic = "NEWRELIC" - - -class SearchNoteSchema(_PaginatedSchema): - sort: str = Field(default="createdAt") - order: SortOrderType = Field(default=SortOrderType.desc) - tags: Optional[List[str]] = Field(default=[]) - shared_only: bool = Field(default=False) - mine_only: bool = Field(default=False) - - class Config: - alias_generator = attribute_to_camel_case - - -class SessionNoteSchema(BaseModel): - message: str = Field(..., min_length=2) - tag: Optional[str] = Field(default=None) - timestamp: int = Field(default=-1) - is_public: bool = Field(default=False) - - class Config: - alias_generator = attribute_to_camel_case - - -class SessionUpdateNoteSchema(SessionNoteSchema): - message: Optional[str] = Field(default=None, min_length=2) - timestamp: Optional[int] = Field(default=None, ge=-1) - is_public: Optional[bool] = Field(default=None) - - @root_validator - def validator(cls, values): - assert len(values.keys()) > 0, "at least 1 attribute should be provided for update" - c = 0 - for v in values.values(): - if v is not None and (not isinstance(v, str) or len(v) > 0): - c += 1 - break - assert c > 0, "at least 1 value should be provided for update" - return values - - -class 
WebhookType(str, Enum): - webhook = "webhook" - slack = "slack" - email = "email" - msteams = "msteams" - - -class SearchCardsSchema(_PaginatedSchema): - order: SortOrderType = Field(default=SortOrderType.desc) - shared_only: bool = Field(default=False) - mine_only: bool = Field(default=False) - query: Optional[str] = Field(default=None) - - class Config: - alias_generator = attribute_to_camel_case - - -class _ClickMapSearchEventRaw(_SessionSearchEventRaw): - type: Literal[EventType.location] = Field(...) - - -class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema): - events: Optional[List[_ClickMapSearchEventRaw]] = Field([]) - filters: List[Union[SessionSearchFilterSchema, _ClickMapSearchEventRaw]] = Field([]) - - @root_validator(pre=True) - def transform(cls, values): - for f in values.get("filters", []): - if f.get("type") == FilterType.duration: - return values - values["filters"] = values.get("filters", []) - values["filters"].append({"value": [5000], "type": FilterType.duration, - "operator": SearchEventOperator._is, "filters": []}) - return values - - @root_validator() - def flat_to_original(cls, values): - if len(values["events"]) > 0: - return values - n_filters = [] - n_events = [] - for v in values.get("filters", []): - if isinstance(v, _ClickMapSearchEventRaw): - n_events.append(v) - else: - n_filters.append(v) - values["events"] = n_events - values["filters"] = n_filters - return values - - -class IssueAdvancedFilter(BaseModel): - type: IssueFilterType = Field(default=IssueFilterType._selector) - value: List[str] = Field(default=[]) - operator: SearchEventOperator = Field(default=SearchEventOperator._is) - - -class ClickMapFilterSchema(BaseModel): - value: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[]) - type: Literal[FilterType.issue] = Field(...) - operator: Literal[SearchEventOperator._is, MathOperator._equal] = Field(...) 
- # source: Optional[Union[ErrorSource, str]] = Field(default=None) - filters: List[IssueAdvancedFilter] = Field(default=[]) - - -class GetHeatmapPayloadSchema(BaseModel): - startDate: int = Field(TimeUTC.now(delta_days=-30)) - endDate: int = Field(TimeUTC.now()) - url: str = Field(...) - # issues: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[]) - filters: List[ClickMapFilterSchema] = Field(default=[]) - click_rage: bool = Field(default=False) - - class Config: - alias_generator = attribute_to_camel_case - - -class FeatureFlagVariant(BaseModel): - variant_id: Optional[int] = Field(default=None) - value: str = Field(...) - description: Optional[str] = Field(default=None) - payload: Optional[str] = Field(default=None) - rollout_percentage: Optional[int] = Field(default=0, ge=0, le=100) - - class Config: - alias_generator = attribute_to_camel_case - - -class FeatureFlagConditionFilterSchema(BaseModel): - is_event: bool = Field(False, const=False) - type: FilterType = Field(...) - value: List[str] = Field(default=[], min_items=1) - operator: Union[SearchEventOperator, MathOperator] = Field(...) - - -class FeatureFlagCondition(BaseModel): - condition_id: Optional[int] = Field(default=None) - name: str = Field(...) - rollout_percentage: Optional[int] = Field(default=0) - filters: List[FeatureFlagConditionFilterSchema] = Field(default=[]) - - class Config: - alias_generator = attribute_to_camel_case - - -class SearchFlagsSchema(_PaginatedSchema): - limit: int = Field(default=15, gt=0, le=200) - user_id: Optional[int] = Field(default=None) - order: SortOrderType = Field(default=SortOrderType.desc) - query: Optional[str] = Field(default=None) - is_active: Optional[bool] = Field(default=None) - - class Config: - alias_generator = attribute_to_camel_case - - -class FeatureFlagType(str, Enum): - single_variant = "single" - multi_variant = "multi" - - -class FeatureFlagStatus(BaseModel): - is_active: bool = Field(...) 
- - class Config: - alias_generator = attribute_to_camel_case - - -class ModuleStatus(BaseModel): - module: str = Field(..., description="Possible values: notes, bugs, live", - regex="^(assist|notes|bug-reports|offline-recordings|alerts)$") - status: bool = Field(...) - - class Config: - alias_generator = attribute_to_camel_case - - -class FeatureFlagSchema(BaseModel): - payload: Optional[str] = Field(default=None) - flag_key: str = Field(..., regex=r'^[a-zA-Z0-9\-]+$') - description: Optional[str] = Field(None) - flag_type: FeatureFlagType = Field(default=FeatureFlagType.single_variant) - is_persist: Optional[bool] = Field(default=False) - is_active: Optional[bool] = Field(default=True) - conditions: List[FeatureFlagCondition] = Field(default=[]) - variants: List[FeatureFlagVariant] = Field(default=[]) - - class Config: - alias_generator = attribute_to_camel_case diff --git a/api/schemas/__init__.py b/api/schemas/__init__.py new file mode 100644 index 000000000..d3ba6afc1 --- /dev/null +++ b/api/schemas/__init__.py @@ -0,0 +1,2 @@ +from .schemas import * +from . 
from typing import TypeVar, Annotated, Union, Any
from enum import Enum as _Enum
from pydantic import BaseModel as _BaseModel
from pydantic import ConfigDict, TypeAdapter, Field
from pydantic.types import AnyType


def attribute_to_camel_case(snake_str: str) -> str:
    """Convert a snake_case identifier to camelCase.

    Used as the pydantic alias generator so JSON payloads use camelCase
    while Python attributes stay snake_case.
    """
    components = snake_str.split("_")
    return components[0] + ''.join(x.title() for x in components[1:])


def transform_email(email: str) -> str:
    """Normalize an email address (lowercase, trimmed); pass non-strings through."""
    return email.lower().strip() if isinstance(email, str) else email


def remove_whitespace(value: str) -> str:
    """Collapse all runs of whitespace to single spaces; pass non-strings through."""
    return " ".join(value.split()) if isinstance(value, str) else value


def remove_duplicate_values(value: list) -> list:
    """Drop duplicate entries from a list of hashable scalars.

    Lists whose first element is an int or a dict are returned untouched
    (dicts are unhashable; int lists are deliberately left as-is upstream).
    Fix: use dict.fromkeys instead of set() so deduplication is
    deterministic and preserves first-seen order (set() ordering varies
    across runs for strings because of hash randomization).
    """
    if value is not None and isinstance(value, list):
        if len(value) > 0 \
                and (isinstance(value[0], int) or isinstance(value[0], dict)):
            return value
        value = list(dict.fromkeys(value))
    return value


def single_to_list(value: Union[list, Any]) -> list:
    """Wrap a scalar into a one-element list; lists and None pass through."""
    if value is not None and not isinstance(value, list):
        value = [value]
    return value


def schema_extra(schema: dict, _):
    """json_schema_extra hook: hide fields flagged with doc_hidden from the OpenAPI schema."""
    props = {}
    for k, v in schema.get('properties', {}).items():
        if not v.get("doc_hidden", False):
            props[k] = v
    schema["properties"] = props


class BaseModel(_BaseModel):
    """Project-wide pydantic base: camelCase aliases, enum values, doc_hidden filtering."""
    model_config = ConfigDict(alias_generator=attribute_to_camel_case,
                              use_enum_values=True,
                              json_schema_extra=schema_extra)


class Enum(_Enum):
    """Enum base with a membership helper for raw values."""

    @classmethod
    def has_value(cls, value) -> bool:
        return value in cls._value2member_map_


T = TypeVar('T')


class ORUnion:
    """Factory producing a validator callable for a discriminated union.

    ORUnion(union_types, discriminator) returns a callable that validates
    keyword arguments against Annotated[union_types, Field(discriminator=...)].
    """

    def __new__(cls, union_types: Union[AnyType], discriminator: str) -> T:
        return lambda **args: TypeAdapter(
            Annotated[union_types, Field(discriminator=discriminator)]) \
            .validate_python(args)
from typing import Annotated, Any
from typing import Optional, List, Union, Literal

from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl
from pydantic import field_validator, model_validator, computed_field

from chalicelib.utils.TimeUTC import TimeUTC
from .overrides import BaseModel, Enum
from .overrides import transform_email, remove_whitespace, remove_duplicate_values, \
    single_to_list, ORUnion


# def transform_old_FilterType(cls, values):
def transform_old_filter_type(cls, values):
    """Pre-validator: map legacy UPPERCASE filter/event type names to the
    current camelCase enum values, leaving unknown values untouched."""
    if values.get("type") is None:
        return values
    values["type"] = {
        # filters
        "USEROS": FilterType.user_os.value,
        "USERBROWSER": FilterType.user_browser.value,
        "USERDEVICE": FilterType.user_device.value,
        "USERCOUNTRY": FilterType.user_country.value,
        "USERID": FilterType.user_id.value,
        "USERANONYMOUSID": FilterType.user_anonymous_id.value,
        "REFERRER": FilterType.referrer.value,
        "REVID": FilterType.rev_id.value,
        "USEROS_IOS": FilterType.user_os_ios.value,
        "USERDEVICE_IOS": FilterType.user_device_ios.value,
        "USERCOUNTRY_IOS": FilterType.user_country_ios.value,
        "USERID_IOS": FilterType.user_id_ios.value,
        "USERANONYMOUSID_IOS": FilterType.user_anonymous_id_ios.value,
        "REVID_IOS": FilterType.rev_id_ios.value,
        "DURATION": FilterType.duration.value,
        "PLATFORM": FilterType.platform.value,
        "METADATA": FilterType.metadata.value,
        "ISSUE": FilterType.issue.value,
        "EVENTS_COUNT": FilterType.events_count.value,
        "UTM_SOURCE": FilterType.utm_source.value,
        "UTM_MEDIUM": FilterType.utm_medium.value,
        "UTM_CAMPAIGN": FilterType.utm_campaign.value,
        # events:
        "CLICK": EventType.click.value,
        "INPUT": EventType.input.value,
        "LOCATION": EventType.location.value,
        "CUSTOM": EventType.custom.value,
        "REQUEST": EventType.request.value,
        "FETCH": EventType.request_details.value,
        "GRAPHQL": EventType.graphql.value,
        "STATEACTION": EventType.state_action.value,
        "ERROR": EventType.error.value,
        "CLICK_IOS": EventType.click_ios.value,
        "INPUT_IOS": EventType.input_ios.value,
        "VIEW_IOS": EventType.view_ios.value,
        "CUSTOM_IOS": EventType.custom_ios.value,
        "REQUEST_IOS": EventType.request_ios.value,
        "ERROR_IOS": EventType.error_ios.value,
        "DOM_COMPLETE": PerformanceEventType.location_dom_complete.value,
        "LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.location_largest_contentful_paint_time.value,
        "TTFB": PerformanceEventType.location_ttfb.value,
        "AVG_CPU_LOAD": PerformanceEventType.location_avg_cpu_load.value,
        "AVG_MEMORY_USAGE": PerformanceEventType.location_avg_memory_usage.value,
        "FETCH_FAILED": PerformanceEventType.fetch_failed.value,
    }.get(values["type"], values["type"])
    return values


class _GRecaptcha(BaseModel):
    """Mixin carrying the optional Google reCAPTCHA response token."""
    g_recaptcha_response: Optional[str] = Field(default=None, alias='g-recaptcha-response')


class UserLoginSchema(_GRecaptcha):
    email: EmailStr = Field(...)
    password: SecretStr = Field(...)

    _transform_email = field_validator('email', mode='before')(transform_email)


class UserSignupSchema(UserLoginSchema):
    # Fix: these were declared with `le=0`, a numeric constraint that is
    # meaningless for str and makes validation raise on every signup
    # (str <= int comparison). Intent is a non-empty display name, so use
    # min_length=1; remove_whitespace runs first and normalizes spacing.
    fullname: str = Field(..., min_length=1)
    organizationName: str = Field(..., min_length=1)

    _transform_fullname = field_validator('fullname', mode='before')(remove_whitespace)
    _transform_organizationName = field_validator('organizationName', mode='before')(remove_whitespace)


class EditAccountSchema(BaseModel):
    name: Optional[str] = Field(default=None)
    tenantName: Optional[str] = Field(default=None)
    opt_out: Optional[bool] = Field(default=None)

    _transform_name = field_validator('name', mode='before')(remove_whitespace)
    _transform_tenantName = field_validator('tenantName', mode='before')(remove_whitespace)


class ForgetPasswordPayloadSchema(_GRecaptcha):
    email: EmailStr = Field(...)

    _transform_email = field_validator('email', mode='before')(transform_email)


class EditUserPasswordSchema(BaseModel):
    old_password: SecretStr = Field(...)
    new_password: SecretStr = Field(...)


# class UpdateTenantSchema(BaseModel):
#     name: Optional[str] = Field(default=None)
#     opt_out: Optional[bool] = Field(default=None)
#     tenant_name: Optional[str] = Field(default=None)


class CreateProjectSchema(BaseModel):
    name: str = Field(default="my first project")
    # "web" or "ios" — drives platform-specific replay/search behavior.
    platform: Literal["web", "ios"] = Field(default="web")

    _transform_name = field_validator('name', mode='before')(remove_whitespace)


class CurrentAPIContext(BaseModel):
    """Request context for service-account (API-key) calls: tenant only."""
    tenant_id: int = Field(...)


class CurrentContext(CurrentAPIContext):
    """Request context for authenticated user calls."""
    user_id: int = Field(...)
    email: EmailStr = Field(...)

    _transform_email = field_validator('email', mode='before')(transform_email)


class AddCollaborationSchema(BaseModel):
    name: str = Field(...)
    url: HttpUrl = Field(...)

    _transform_name = field_validator('name', mode='before')(remove_whitespace)
    _transform_url = field_validator('url', mode='before')(remove_whitespace)


class EditCollaborationSchema(AddCollaborationSchema):
    # name becomes optional on edit; url stays required via the parent.
    name: Optional[str] = Field(default=None)


# class CreateNotificationSchema(BaseModel):
#     token: str = Field(...)
#     notifications: List = Field(...)
class _TimedSchema(BaseModel):
    """Base for payloads carrying a [startTimestamp, endTimestamp] range.

    Fix: the fields were annotated plain `int` while defaulting to None —
    an annotation/default contradiction (the subclass NotificationsViewSchema
    already re-declares them Optional[int], confirming the intent).
    """
    startTimestamp: Optional[int] = Field(default=None)
    endTimestamp: Optional[int] = Field(default=None)

    @model_validator(mode='before')
    def transform_time(cls, values):
        # Back-compat: accept legacy startDate/endDate keys from old clients.
        if values.get("startTimestamp") is None and values.get("startDate") is not None:
            values["startTimestamp"] = values["startDate"]
        if values.get("endTimestamp") is None and values.get("endDate") is not None:
            values["endTimestamp"] = values["endDate"]
        return values

    @model_validator(mode='after')
    def __time_validator(cls, values):
        if values.startTimestamp is not None:
            assert 0 <= values.startTimestamp, "startTimestamp must be greater or equal to 0"
        if values.endTimestamp is not None:
            assert 0 <= values.endTimestamp, "endTimestamp must be greater or equal to 0"
        if values.startTimestamp is not None and values.endTimestamp is not None:
            assert values.startTimestamp <= values.endTimestamp, \
                "endTimestamp must be greater or equal to startTimestamp"
        return values


class NotificationsViewSchema(_TimedSchema):
    """Marks a set of notifications as viewed, optionally bounded in time."""
    ids: List[int] = Field(default=[])
    startTimestamp: Optional[int] = Field(default=None)
    endTimestamp: Optional[int] = Field(default=None)


class IssueTrackingIntegration(BaseModel):
    token: str = Field(...)


class IssueTrackingGithubSchema(IssueTrackingIntegration):
    pass


class IssueTrackingJiraSchema(IssueTrackingIntegration):
    username: str = Field(...)
    url: HttpUrl = Field(...)

    @field_validator('url')
    @classmethod
    def transform_url(cls, v: HttpUrl):
        # NOTE(review): rebuilding from scheme+host drops any port/path on the
        # submitted URL — presumably intentional normalization for Jira base
        # URLs; confirm against the Jira client usage.
        return HttpUrl.build(scheme=v.scheme.lower(), host=v.host.lower())


class WebhookSchema(BaseModel):
    webhook_id: Optional[int] = Field(default=None)
    endpoint: AnyHttpUrl = Field(...)
    auth_header: Optional[str] = Field(default=None)
    # Name length is capped (see changelog: "limit webhooks name length").
    name: str = Field(default="", max_length=100)

    _transform_name = field_validator('name', mode='before')(remove_whitespace)


class CreateMemberSchema(BaseModel):
    user_id: Optional[int] = Field(default=None)
    name: str = Field(...)
    email: EmailStr = Field(...)
    admin: bool = Field(default=False)

    _transform_email = field_validator('email', mode='before')(transform_email)
    _transform_name = field_validator('name', mode='before')(remove_whitespace)


class EditMemberSchema(BaseModel):
    name: str = Field(...)
    email: EmailStr = Field(...)
    admin: bool = Field(default=False)

    _transform_email = field_validator('email', mode='before')(transform_email)
    _transform_name = field_validator('name', mode='before')(remove_whitespace)


class EditPasswordByInvitationSchema(BaseModel):
    invitation: str = Field(...)
    # "pass" is a Python keyword, hence the alias.
    passphrase: str = Field(..., alias="pass")
    password: SecretStr = Field(...)


class AssignmentSchema(BaseModel):
    """Payload for creating an issue-tracker assignment from a session."""
    assignee: str = Field(...)
    description: str = Field(...)
    title: str = Field(...)
    issue_type: str = Field(...)

    _transform_title = field_validator('title', mode='before')(remove_whitespace)


class CommentAssignmentSchema(BaseModel):
    message: str = Field(...)


class IntegrationNotificationSchema(BaseModel):
    comment: Optional[str] = Field(default=None)


class GdprSchema(BaseModel):
    """Project GDPR/recording privacy settings."""
    maskEmails: bool = Field(...)
    sampleRate: int = Field(...)
    maskNumbers: bool = Field(...)
    defaultInputMode: str = Field(...)


class SampleRateSchema(BaseModel):
    rate: int = Field(..., ge=0, le=100)
    capture_all: bool = Field(default=False)


class WeeklyReportConfigSchema(BaseModel):
    weekly_report: bool = Field(default=True)


class IntegrationBase(BaseModel):
    """Common ancestor for all log-tool integration payloads."""
    pass


class IntegrationSentrySchema(IntegrationBase):
    project_slug: str = Field(...)
    organization_slug: str = Field(...)
    token: str = Field(...)
class IntegrationDatadogSchema(IntegrationBase):
    api_key: str = Field(...)
    application_key: str = Field(...)


# NOTE(review): class name misspells "Integration" — kept as-is because other
# modules import it under this name; renaming would break callers.
class IntegartionStackdriverSchema(IntegrationBase):
    service_account_credentials: str = Field(...)
    log_name: str = Field(...)


class IntegrationNewrelicSchema(IntegrationBase):
    application_id: str = Field(...)
    x_query_key: str = Field(...)
    region: str = Field(...)


class IntegrationRollbarSchema(IntegrationBase):
    access_token: str = Field(...)


class IntegrationBugsnagBasicSchema(IntegrationBase):
    # Token-only payload, used to list projects before one is chosen.
    authorization_token: str = Field(...)


class IntegrationBugsnagSchema(IntegrationBugsnagBasicSchema):
    bugsnag_project_id: str = Field(...)


class IntegrationCloudwatchBasicSchema(IntegrationBase):
    aws_access_key_id: str = Field(...)
    aws_secret_access_key: str = Field(...)
    region: str = Field(...)


class IntegrationCloudwatchSchema(IntegrationCloudwatchBasicSchema):
    log_group_name: str = Field(...)


class IntegrationElasticsearchTestSchema(IntegrationBase):
    # Connection-test payload (no index yet).
    host: str = Field(...)
    port: int = Field(...)
    api_key_id: str = Field(...)
    api_key: str = Field(...)


class IntegrationElasticsearchSchema(IntegrationElasticsearchTestSchema):
    indexes: str = Field(...)


class IntegrationSumologicSchema(IntegrationBase):
    access_id: str = Field(...)
    access_key: str = Field(...)
    region: str = Field(...)


class MetadataSchema(BaseModel):
    """One project metadata key (index is assigned server-side when absent)."""
    index: Optional[int] = Field(default=None)
    key: str = Field(...)

    _transform_key = field_validator('key', mode='before')(remove_whitespace)


# class MetadataListSchema(BaseModel):
#     list: List[MetadataSchema] = Field(...)


class EmailPayloadSchema(BaseModel):
    """Internal payload for sending a templated email (auth guards the endpoint)."""
    auth: str = Field(...)
    email: EmailStr = Field(...)
    link: str = Field(...)
    message: str = Field(...)

    _transform_email = field_validator('email', mode='before')(transform_email)


class MemberInvitationPayloadSchema(BaseModel):
    """Internal payload for sending a member-invitation email."""
    auth: str = Field(...)
    email: EmailStr = Field(...)
    invitation_link: str = Field(...)
    client_id: str = Field(...)
    sender_name: str = Field(...)

    _transform_email = field_validator('email', mode='before')(transform_email)


class _AlertMessageSchema(BaseModel):
    # Destination of an alert notification, e.g. type="slack", value=channel id.
    type: str = Field(...)
    value: str = Field(...)


class AlertDetectionType(str, Enum):
    percent = "percent"
    change = "change"


class _AlertOptionSchema(BaseModel):
    """Alert scheduling/notification options; periods are in minutes."""
    message: List[_AlertMessageSchema] = Field([])
    currentPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(...)
    previousPeriod: Literal[15, 30, 60, 120, 240, 1440] = Field(default=15)
    lastNotification: Optional[int] = Field(default=None)
    renotifyInterval: Optional[int] = Field(default=720)


class AlertColumn(str, Enum):
    """Metrics an alert can watch; values are `<category>.<metric>.<aggregation>`."""
    performance__dom_content_loaded__average = "performance.dom_content_loaded.average"
    performance__first_meaningful_paint__average = "performance.first_meaningful_paint.average"
    performance__page_load_time__average = "performance.page_load_time.average"
    performance__dom_build_time__average = "performance.dom_build_time.average"
    performance__speed_index__average = "performance.speed_index.average"
    performance__page_response_time__average = "performance.page_response_time.average"
    performance__ttfb__average = "performance.ttfb.average"
    performance__time_to_render__average = "performance.time_to_render.average"
    performance__image_load_time__average = "performance.image_load_time.average"
    performance__request_load_time__average = "performance.request_load_time.average"
    resources__load_time__average = "resources.load_time.average"
    resources__missing__count = "resources.missing.count"
    errors__4xx_5xx__count = "errors.4xx_5xx.count"
    errors__4xx__count = "errors.4xx.count"
    errors__5xx__count = "errors.5xx.count"
    errors__javascript__impacted_sessions__count = "errors.javascript.impacted_sessions.count"
    performance__crashes__count = "performance.crashes.count"
    errors__javascript__count = "errors.javascript.count"
    errors__backend__count = "errors.backend.count"
    # Sentinel: alert watches a saved card series instead of a built-in metric.
    custom = "CUSTOM"


class MathOperator(str, Enum):
    _equal = "="
    _less = "<"
    _greater = ">"
    _less_eq = "<="
    _greater_eq = ">="


class _AlertQuerySchema(BaseModel):
    # left is either a built-in AlertColumn or a series_id (int) for custom alerts.
    left: Union[AlertColumn, int] = Field(...)
    right: float = Field(...)
    operator: MathOperator = Field(...)


class AlertDetectionMethod(str, Enum):
    threshold = "threshold"
    change = "change"


class AlertSchema(BaseModel):
    """Create/update payload for an alert definition."""
    name: str = Field(...)
    detection_method: AlertDetectionMethod = Field(...)
    change: Optional[AlertDetectionType] = Field(default=AlertDetectionType.change)
    description: Optional[str] = Field(default=None)
    options: _AlertOptionSchema = Field(...)
    query: _AlertQuerySchema = Field(...)
    # Derived, not client-settable; hidden from the OpenAPI schema.
    series_id: Optional[int] = Field(default=None, doc_hidden=True)

    @model_validator(mode="after")
    def transform_alert(cls, values):
        # An int on query.left means "watch this card series": move it into
        # series_id and mark the column as custom.
        values.series_id = None
        if isinstance(values.query.left, int):
            values.series_id = values.query.left
            values.query.left = AlertColumn.custom

        return values


class SourcemapUploadPayloadSchema(BaseModel):
    urls: List[str] = Field(..., alias="URL")


class ErrorSource(str, Enum):
    js_exception = "js_exception"
    bugsnag = "bugsnag"
    cloudwatch = "cloudwatch"
    datadog = "datadog"
    newrelic = "newrelic"
    rollbar = "rollbar"
    sentry = "sentry"
    stackdriver = "stackdriver"
    sumologic = "sumologic"


class EventType(str, Enum):
    """Searchable session event kinds (web first, then iOS variants)."""
    click = "click"
    input = "input"
    location = "location"
    custom = "custom"
    request = "request"
    request_details = "fetch"
    graphql = "graphql"
    state_action = "stateAction"
    error = "error"
    click_ios = "tapIos"
    input_ios = "inputIos"
    view_ios = "viewIos"
    custom_ios = "customIos"
    request_ios = "requestIos"
    error_ios = "errorIos"
    swipe_ios = "swipeIos"


class PerformanceEventType(str, Enum):
    """Performance pseudo-events usable in session search."""
    location_dom_complete = "domComplete"
    location_largest_contentful_paint_time = "largestContentfulPaintTime"
    location_ttfb = "ttfb"
    location_avg_cpu_load = "avgCpuLoad"
    location_avg_memory_usage = "avgMemoryUsage"
    fetch_failed = "fetchFailed"
    # fetch_duration = "FETCH_DURATION"


class FilterType(str, Enum):
    """Session-level filter kinds (as opposed to event filters)."""
    user_os = "userOs"
    user_browser = "userBrowser"
    user_device = "userDevice"
    user_country = "userCountry"
    user_city = "userCity"
    user_state = "userState"
    user_id = "userId"
    user_anonymous_id = "userAnonymousId"
    referrer = "referrer"
    rev_id = "revId"
    # IOS
    user_os_ios = "userOsIos"
    user_device_ios = "userDeviceIos"
    user_country_ios = "userCountryIos"
    user_id_ios = "userIdIos"
    user_anonymous_id_ios = "userAnonymousIdIos"
    rev_id_ios = "revIdIos"
    #
    duration = "duration"
    platform = "platform"
    metadata = "metadata"
    issue = "issue"
    events_count = "eventsCount"
    utm_source = "utmSource"
    utm_medium = "utmMedium"
    utm_campaign = "utmCampaign"


class SearchEventOperator(str, Enum):
    # Leading underscores avoid clashing with Python keywords/builtins (is, on, ...).
    _is = "is"
    _is_any = "isAny"
    _on = "on"
    _on_any = "onAny"
    _is_not = "isNot"
    _is_undefined = "isUndefined"
    _not_on = "notOn"
    _contains = "contains"
    _not_contains = "notContains"
    _starts_with = "startsWith"
    _ends_with = "endsWith"


class ClickEventExtraOperator(str, Enum):
    # Click-map specific operator: match a click by CSS selector.
    _on_selector = "onSelector"


class PlatformType(str, Enum):
    mobile = "mobile"
    desktop = "desktop"
    tablet = "tablet"


class SearchEventOrder(str, Enum):
    # How consecutive event conditions combine in session search.
    _then = "then"
    _or = "or"
    _and = "and"


class IssueType(str, Enum):
    click_rage = 'click_rage'
    dead_click = 'dead_click'
    excessive_scrolling = 'excessive_scrolling'
    bad_request = 'bad_request'
    missing_resource = 'missing_resource'
    memory = 'memory'
    cpu = 'cpu'
    slow_resource = 'slow_resource'
    slow_page_load = 'slow_page_load'
    crash = 'crash'
    custom = 'custom'
    js_exception = 'js_exception'
    mouse_thrashing = 'mouse_thrashing'


class MetricFormatType(str, Enum):
    session_count = 'sessionCount'


class HttpMethod(str, Enum):
    _get = 'GET'
    _head = 'HEAD'
    _post = 'POST'
    _put = 'PUT'
    _delete = 'DELETE'
    _connect = 'CONNECT'
    _option = 'OPTIONS'
    _trace = 'TRACE'
    _patch = 'PATCH'


class FetchFilterType(str, Enum):
    """Sub-filters applicable to fetch/request events."""
    _url = "fetchUrl"  # FETCH_URL
    _status_code = "fetchStatusCode"  # FETCH_STATUS_CODE
    _method = "fetchMethod"  # FETCH_METHOD
    _duration = "fetchDuration"  # FETCH_DURATION
    _request_body = "fetchRequestBody"  # FETCH_REQUEST_BODY
    _response_body = "fetchResponseBody"  # FETCH_RESPONSE_BODY


class GraphqlFilterType(str, Enum):
    """Sub-filters applicable to GraphQL events."""
    _name = "graphqlName"  # GRAPHQL_NAME
    _method = "graphqlMethod"  # GRAPHQL_METHOD
    _request_body = "graphqlRequestBody"  # GRAPHQL_REQUEST_BODY
    _response_body = "graphqlResponseBody"  # GRAPHQL_RESPONSE_BODY


class RequestGraphqlFilterSchema(BaseModel):
    type: Union[FetchFilterType, GraphqlFilterType] = Field(...)
    value: List[Union[int, str]] = Field(...)
    operator: Union[SearchEventOperator, MathOperator] = Field(...)


# class SessionSearchEventRaw(BaseModel):
class SessionSearchEventSchema2(BaseModel):
    # is_event: bool = Field(default=True, const=True)
    # Literal[True] replaces pydantic v1's const=True field.
    is_event: Literal[True] = True
    value: List[str] = Field(...)
    type: Union[EventType, PerformanceEventType] = Field(...)
    operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...)
+ source: Optional[List[Union[ErrorSource, int, str]]] = Field(default=None) + sourceOperator: Optional[MathOperator] = Field(default=None) + filters: Optional[List[RequestGraphqlFilterSchema]] = Field(default=[]) + + _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + _single_to_list_values = field_validator('value', mode='before')(single_to_list) + _transform = model_validator(mode='before')(transform_old_filter_type) + + @model_validator(mode='after') + def event_validator(cls, values): + if isinstance(values.type, PerformanceEventType): + if values.type == PerformanceEventType.fetch_failed: + return values + # assert values.get("source") is not None, "source should not be null for PerformanceEventType" + # assert isinstance(values["source"], list) and len(values["source"]) > 0, \ + # "source should not be empty for PerformanceEventType" + assert values.sourceOperator is not None, \ + "sourceOperator should not be null for PerformanceEventType" + assert "source" in values, f"source is required for {values.type}" + assert isinstance(values.source, list), f"source of type list is required for {values.type}" + for c in values["source"]: + assert isinstance(c, int), f"source value should be of type int for {values.type}" + elif values.type == EventType.error and values.source is None: + values.source = [ErrorSource.js_exception] + elif values.type == EventType.request_details: + assert isinstance(values.filters, List) and len(values.filters) > 0, \ + f"filters should be defined for {EventType.request_details}" + elif values.type == EventType.graphql: + assert isinstance(values.filters, List) and len(values.filters) > 0, \ + f"filters should be defined for {EventType.graphql}" + + if isinstance(values.operator, ClickEventExtraOperator): + assert values.type == EventType.click, \ + f"operator:{values.operator} is only available for event-type: {EventType.click}" + return values + + +# class 
_SessionSearchEventSchema(SessionSearchEventRaw): +# value: Union[List[Union[SessionSearchEventRaw, str]], str] = Field(...) +class SessionSearchFilterSchema(BaseModel): + # class SessionSearchFilterSchema(ORBaseModel): + # is_event: bool = Field(False, const=False) + is_event: Literal[False] = False + value: List[Union[IssueType, PlatformType, int, str]] = Field(default=[]) + type: FilterType = Field(...) + operator: Union[SearchEventOperator, MathOperator] = Field(...) + source: Optional[Union[ErrorSource, str]] = Field(default=None) + + _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + _transform = model_validator(mode='before')(transform_old_filter_type) + _single_to_list_values = field_validator('value', mode='before')(single_to_list) + + @model_validator(mode='after') + def filter_validator(cls, values): + if values.type == FilterType.metadata: + assert values.source is not None and len(values.source) > 0, \ + "must specify a valid 'source' for metadata filter" + elif values.type == FilterType.issue: + for v in values.value: + # assert isinstance(v, IssueType), f"value should be of type IssueType for {values.type} filter" + if IssueType.has_value(v): + v = IssueType(v) + else: + raise ValueError(f"value should be of type IssueType for {values.type} filter") + elif values.type == FilterType.platform: + for v in values.value: + # assert isinstance(v, PlatformType), f"value should be of type PlatformType for {values.type} filter" + if PlatformType.has_value(v): + v = PlatformType(v) + else: + raise ValueError(f"value should be of type PlatformType for {values.type} filter") + elif values.type == FilterType.events_count: + # assert isinstance(values.operator, MathOperator), \ + # f"operator should be of type MathOperator for {values.type} filter" + if values.operator in MathOperator.has_value(values.operator): + values.operator = MathOperator(values.operator) + else: + raise ValueError(f"operator should be of type 
MathOperator for {values.type} filter") + + for v in values.value: + assert isinstance(v, int), f"value should be of type int for {values.type} filter" + else: + # assert isinstance(values.operator, SearchEventOperator), \ + # f"operator should be of type SearchEventOperator for {values.type} filter" + if SearchEventOperator.has_value(values.operator): + values.operator = SearchEventOperator(values.operator) + else: + raise ValueError(f"operator should be of type SearchEventOperator for {values.type} filter") + + return values + + +class _PaginatedSchema(BaseModel): + limit: int = Field(default=200, gt=0, le=200) + page: int = Field(default=1, gt=0) + + +class SortOrderType(str, Enum): + asc = "ASC" + desc = "DESC" + + +# this type is created to allow mixing events&filters and specifying a discriminator +GroupedFilterType = Annotated[Union[SessionSearchFilterSchema, SessionSearchEventSchema2], \ + Field(discriminator='is_event')] + + +class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema): + events: List[SessionSearchEventSchema2] = Field(default=[], doc_hidden=True) + filters: List[GroupedFilterType] = Field(default=[]) + sort: str = Field(default="startTs") + order: SortOrderType = Field(default=SortOrderType.desc) + events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) + group_by_user: bool = Field(default=False) + bookmarked: bool = Field(default=False) + + @model_validator(mode="before") + def transform_order(cls, values): + if values.get("sort") is None: + values["sort"] = "startTs" + + if values.get("order") is None: + values["order"] = SortOrderType.desc + else: + values["order"] = values["order"].upper() + return values + + @model_validator(mode="before") + def add_missing_attributes(cls, values): + # in case the old search body was passed + if len(values.get("events", [])) > 0: + for v in values["events"]: + v["isEvent"] = True + for v in values.get("filters", []): + v["isEvent"] = False + else: + for v in 
values.get("filters", []): + if v.get("isEvent") is None: + v["isEvent"] = False + return values + + @model_validator(mode="after") + def split_filters_events(cls, values): + # in case the old search body was passed + n_filters = [] + n_events = [] + for v in values.filters: + if v.is_event: + n_events.append(v) + else: + n_filters.append(v) + values.events = n_events + values.filters = n_filters + return values + + +# class FlatSessionsSearch(BaseModel): +# events: Optional[List[_SessionSearchEventSchema]] = Field([]) +# filters: List[Union[SessionSearchFilterSchema, _SessionSearchEventSchema]] = Field([]) +# +# @root_validator(pre=True) +# def flat_to_original(cls, values): +# # in case the old search body was passed +# if len(values.get("events", [])) > 0: +# for v in values["events"]: +# v["isEvent"] = True +# for v in values.get("filters", []): +# v["isEvent"] = False +# else: +# n_filters = [] +# n_events = [] +# for v in values.get("filters", []): +# if v.get("isEvent"): +# n_events.append(v) +# else: +# v["isEvent"] = False +# n_filters.append(v) +# values["events"] = n_events +# values["filters"] = n_filters +# return values + + +# class SessionsSearchCountSchema(FlatSessionsSearchPayloadSchema): +# # class SessionsSearchCountSchema(SessionsSearchPayloadSchema): +# sort: Optional[str] = Field(default=None) +# order: Optional[str] = Field(default=None) + +# +# # class FunnelSearchPayloadSchema(FlatSessionsSearchPayloadSchema): +# class FunnelSearchPayloadSchema(SessionsSearchPayloadSchema): +# range_value: Optional[str] = Field(default=None) +# sort: Optional[str] = Field(default=None) +# order: Optional[str] = Field(default=None) +# events_order: Literal[SearchEventOrder._then] = SearchEventOrder._then +# group_by_user: Literal[False] = False +# +# @model_validator(mode="before") +# def __enforce_default_values(cls, values): +# values["eventsOrder"] = SearchEventOrder._then +# values["groupByUser"] = False +# return values + + +# +# class 
FunnelSchema(BaseModel): +# name: str = Field(...) +# filter: FunnelSearchPayloadSchema = Field([]) + + +# +# # class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): +# class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema): +# sort: Optional[str] = Field(None) +# order: Optional[str] = Field(None) +# events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then, const=True) +# group_by_user: Optional[bool] = Field(default=False, const=True) +# rangeValue: Optional[str] = Field(None) +# + +class ErrorStatus(str, Enum): + all = 'all' + unresolved = 'unresolved' + resolved = 'resolved' + ignored = 'ignored' + + +class ErrorSort(str, Enum): + occurrence = 'occurrence' + users_count = 'users' + sessions_count = 'sessions' + + +# class SearchErrorsSchema(FlatSessionsSearchPayloadSchema): +class SearchErrorsSchema(SessionsSearchPayloadSchema): + sort: ErrorSort = Field(default=ErrorSort.occurrence) + density: Optional[int] = Field(default=7) + status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) + query: Optional[str] = Field(default=None) + + +class ProductAnalyticsSelectedEventType(str, Enum): + click = EventType.click.value + input = EventType.input.value + location = EventType.location.value + custom_event = EventType.custom.value + + +class ProductAnalyticsFilterType(str, Enum): + start_point = 'startPoint' + end_point = 'endPoint' + exclude = 'exclude' + + +class PathAnalysisSubFilterSchema(BaseModel): + is_event: Literal[True] = True + value: List[str] = Field(...) + type: ProductAnalyticsSelectedEventType = Field(...) + operator: Union[SearchEventOperator, ClickEventExtraOperator] = Field(...) 
+ + _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + + +class ProductAnalyticsFilter(BaseModel): + # The filters attribute will help with startPoint/endPoint/exclude + filters: Optional[List[PathAnalysisSubFilterSchema]] = Field(default=[]) + type: Union[ProductAnalyticsFilterType, FilterType] + operator: Union[SearchEventOperator, ClickEventExtraOperator, MathOperator] = Field(...) + # TODO: support session metadat filters + value: List[Union[IssueType, PlatformType, int, str]] = Field(...) + + _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + + # @model_validator(mode='after') + # def __validator(cls, values): + # if values.type == ProductAnalyticsFilterType.event_type: + # assert values.value is not None and len(values.value) > 0, \ + # f"value must be provided for type:{ProductAnalyticsFilterType.event_type}" + # assert ProductAnalyticsEventType.has_value(values.value[0]), \ + # f"value must be of type {ProductAnalyticsEventType} for type:{ProductAnalyticsFilterType.event_type}" + # + # return values + + +class PathAnalysisSchema(_TimedSchema, _PaginatedSchema): + # startTimestamp: int = Field(default=TimeUTC.now(delta_days=-1)) + # endTimestamp: int = Field(default=TimeUTC.now()) + density: int = Field(default=7) + filters: List[ProductAnalyticsFilter] = Field(default=[]) + type: Optional[str] = Field(default=None) + + @model_validator(mode='after') + def __validator(cls, values): + filters = [] + for f in values.filters: + if ProductAnalyticsFilterType.has_value(f.type) and (f.filters is None or len(f.filters) == 0): + continue + filters.append(f) + values.filters = filters + + # Path analysis should have only 1 start-point with multiple values OR 1 end-point with multiple values + # start-point's value and end-point's value should not be excluded + s_e_detected = 0 + s_e_values = {} + exclude_values = {} + for f in values.filters: + if f.type in 
(ProductAnalyticsFilterType.start_point, ProductAnalyticsFilterType.end_point): + s_e_detected += 1 + for s in f.filters: + s_e_values[s.type] = s_e_values.get(s.type, []) + s.value + elif f.type in ProductAnalyticsFilterType.exclude: + for s in f.filters: + exclude_values[s.type] = exclude_values.get(s.type, []) + s.value + + assert s_e_detected <= 1, f"Only 1 startPoint with multiple values OR 1 endPoint with multiple values is allowed" + for t in exclude_values: + for v in t: + assert v not in s_e_values.get(t, []), f"startPoint and endPoint cannot be excluded, value: {v}" + + return values + + +# class AssistSearchPayloadSchema(BaseModel): +# filters: List[dict] = Field([]) + + +class MobileSignPayloadSchema(BaseModel): + keys: List[str] = Field(...) + + +class CardSeriesFilterSchema(SearchErrorsSchema): + # TODO: transform these if they are used by the UI + # startDate: Optional[int] = Field(default=None) + # endDate: Optional[int] = Field(default=None) + sort: Optional[str] = Field(default=None) + order: SortOrderType = Field(default=SortOrderType.desc) + group_by_user: Literal[False] = False + + +class CardSeriesSchema(BaseModel): + series_id: Optional[int] = Field(default=None) + name: Optional[str] = Field(default=None) + index: Optional[int] = Field(default=None) + filter: Optional[CardSeriesFilterSchema] = Field(default=None) + + +class MetricTimeseriesViewType(str, Enum): + line_chart = "lineChart" + progress = "progress" + area_chart = "areaChart" + + +class MetricTableViewType(str, Enum): + table = "table" + pie_chart = "pieChart" + + +class MetricOtherViewType(str, Enum): + other_chart = "chart" + list_chart = "list" + + +class MetricType(str, Enum): + timeseries = "timeseries" + table = "table" + funnel = "funnel" + errors = "errors" + performance = "performance" + resources = "resources" + web_vital = "webVitals" + pathAnalysis = "pathAnalysis" + retention = "retention" + stickiness = "stickiness" + click_map = "clickMap" + insights = "insights" + 
+ +class MetricOfErrors(str, Enum): + calls_errors = "callsErrors" # calls_errors + domains_errors_4xx = "domainsErrors4xx" # domains_errors_4xx + domains_errors_5xx = "domainsErrors5xx" # domains_errors_5xx + errors_per_domains = "errorsPerDomains" # errors_per_domains + errors_per_type = "errorsPerType" # errors_per_type + impacted_sessions_by_js_errors = "impactedSessionsByJsErrors" # impacted_sessions_by_js_errors + resources_by_party = "resourcesByParty" # resources_by_party + + +class MetricOfPerformance(str, Enum): + cpu = "cpu" # cpu + crashes = "crashes" # crashes + fps = "fps" # fps + impacted_sessions_by_slow_pages = "impactedSessionsBySlowPages" # impacted_sessions_by_slow_pages + memory_consumption = "memoryConsumption" # memory_consumption + pages_dom_buildtime = "pagesDomBuildtime" # pages_dom_buildtime + pages_response_time = "pagesResponseTime" # pages_response_time + pages_response_time_distribution = "pagesResponseTimeDistribution" # pages_response_time_distribution + resources_vs_visually_complete = "resourcesVsVisuallyComplete" # resources_vs_visually_complete + sessions_per_browser = "sessionsPerBrowser" # sessions_per_browser + slowest_domains = "slowestDomains" # slowest_domains + speed_location = "speedLocation" # speed_location + time_to_render = "timeToRender" # time_to_render + + +class MetricOfResources(str, Enum): + missing_resources = "missingResources" # missing_resources + resources_count_by_type = "resourcesCountByType" # resources_count_by_type + resources_loading_time = "resourcesLoadingTime" # resources_loading_time + resource_type_vs_response_end = "resourceTypeVsResponseEnd" # resource_type_vs_response_end + slowest_resources = "slowestResources" # slowest_resources + + +class MetricOfWebVitals(str, Enum): + avg_cpu = "avgCpu" # avg_cpu + avg_dom_content_loaded = "avgDomContentLoaded" # avg_dom_content_loaded + avg_dom_content_load_start = "avgDomContentLoadStart" # avg_dom_content_load_start + avg_first_contentful_pixel = 
"avgFirstContentfulPixel" # avg_first_contentful_pixel + avg_first_paint = "avgFirstPaint" # avg_first_paint + avg_fps = "avgFps" # avg_fps + avg_image_load_time = "avgImageLoadTime" # avg_image_load_time + avg_page_load_time = "avgPageLoadTime" # avg_page_load_time + avg_pages_dom_buildtime = "avgPagesDomBuildtime" # avg_pages_dom_buildtime + avg_pages_response_time = "avgPagesResponseTime" # avg_pages_response_time + avg_request_load_time = "avgRequestLoadTime" # avg_request_load_time + avg_response_time = "avgResponseTime" # avg_response_time + avg_session_duration = "avgSessionDuration" # avg_session_duration + avg_till_first_byte = "avgTillFirstByte" # avg_till_first_byte + avg_time_to_interactive = "avgTimeToInteractive" # avg_time_to_interactive + avg_time_to_render = "avgTimeToRender" # avg_time_to_render + avg_used_js_heap_size = "avgUsedJsHeapSize" # avg_used_js_heap_size + avg_visited_pages = "avgVisitedPages" # avg_visited_pages + count_requests = "countRequests" # count_requests + count_sessions = "countSessions" # count_sessions + + +class MetricOfTable(str, Enum): + user_os = FilterType.user_os.value + user_browser = FilterType.user_browser.value + user_device = FilterType.user_device.value + user_country = FilterType.user_country.value + # user_city = FilterType.user_city.value + # user_state = FilterType.user_state.value + user_id = FilterType.user_id.value + issues = FilterType.issue.value + visited_url = "location" + sessions = "sessions" + errors = "jsException" + + +class MetricOfTimeseries(str, Enum): + session_count = "sessionCount" + + +class MetricOfFunnels(str, Enum): + session_count = MetricOfTimeseries.session_count.value + + +class MetricOfClickMap(str, Enum): + click_map_url = "clickMapUrl" + + +class MetricOfPathAnalysis(str, Enum): + session_count = MetricOfTimeseries.session_count.value + + +# class CardSessionsSchema(FlatSessionsSearch, _PaginatedSchema, _TimedSchema): +class CardSessionsSchema(SessionsSearchPayloadSchema): + 
startTimestamp: int = Field(default=TimeUTC.now(-7)) + endTimestamp: int = Field(defautl=TimeUTC.now()) + density: int = Field(default=7, ge=1, le=200) + series: List[CardSeriesSchema] = Field(default=[]) + + # Used mainly for PathAnalysis, and could be used by other cards + hide_excess: Optional[bool] = Field(default=False, description="Hide extra values") + + @model_validator(mode="before") + def __enforce_default(cls, values): + if values.get("startTimestamp") is None: + values["startTimestamp"] = TimeUTC.now(-7) + + if values.get("endTimestamp") is None: + values["endTimestamp"] = TimeUTC.now() + + for s in values.get("series", []): + if s.get("filter") is not None: + s["filter"]["startTimestamp"] = values["startTimestamp"] + s["filter"]["endTimestamp"] = values["endTimestamp"] + + return values + + @model_validator(mode="after") + def __enforce_default_after(cls, values): + for s in values.series: + if s.filter is not None: + s.filter.limit = values.limit + s.filter.page = values.page + + return values + + +# +# +# class CardChartSchema(CardSessionsSchema): +# density: int = Field(default=7) + + +class CardConfigSchema(BaseModel): + col: Optional[int] = Field(default=None) + row: Optional[int] = Field(default=2) + position: Optional[int] = Field(default=0) + + +# class CardSchema(CardChartSchema): +class __CardSchema(CardSessionsSchema): + name: Optional[str] = Field(default=None) + is_public: bool = Field(default=True) + default_config: CardConfigSchema = Field(..., alias="config") + thumbnail: Optional[str] = Field(default=None) + metric_format: Optional[MetricFormatType] = Field(default=None) + + # view_type: Union[MetricTimeseriesViewType, \ + # MetricTableViewType, MetricOtherViewType] = Field(...) + view_type: Any + metric_type: MetricType = Field(...) 
+ # metric_of: Union[MetricOfTimeseries, MetricOfTable, MetricOfErrors, \ + # MetricOfPerformance, MetricOfResources, MetricOfWebVitals, \ + # MetricOfClickMap] = Field(default=MetricOfTable.user_id) + metric_of: Any + metric_value: List[IssueType] = Field(default=[]) + + @computed_field + @property + def is_template(self) -> bool: + return self.metric_type in [MetricType.errors, MetricType.performance, + MetricType.resources, MetricType.web_vital] + + # TODO: finish the reset of these conditions + # @model_validator(mode='after') + # def __validator(cls, values): + # if values.metric_type == MetricType.click_map: + # # assert isinstance(values.metric_of, MetricOfClickMap), \ + # # f"metricOf must be of type {MetricOfClickMap} for metricType:{MetricType.click_map}" + # for s in values.series: + # for f in s.filter.events: + # assert f.type == EventType.location, f"only events of type:{EventType.location} are allowed for metricOf:{MetricType.click_map}" + # return values + + +class CardTimeSeries(__CardSchema): + metric_type: Literal[MetricType.timeseries] + metric_of: MetricOfTimeseries = Field(default=MetricOfTimeseries.session_count) + view_type: MetricTimeseriesViewType + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["metricValue"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfTimeseries(values.metric_of) + return values + + +class CardTable(__CardSchema): + metric_type: Literal[MetricType.table] + metric_of: MetricOfTable = Field(default=MetricOfTable.user_id) + view_type: MetricTableViewType = Field(...) 
+ + @model_validator(mode="before") + def __enforce_default(cls, values): + if values.get("metricOf") is not None and values.get("metricOf") != MetricOfTable.issues: + values["metricValue"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfTable(values.metric_of) + return values + + +class CardFunnel(__CardSchema): + metric_type: Literal[MetricType.funnel] + metric_of: MetricOfFunnels = Field(default=MetricOfFunnels.session_count) + view_type: MetricOtherViewType = Field(...) + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["metricOf"] = MetricOfFunnels.session_count + values["viewType"] = MetricOtherViewType.other_chart + if values.get("series") is not None and len(values["series"]) > 0: + values["series"] = [values["series"][0]] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfTimeseries(values.metric_of) + return values + + +class CardErrors(__CardSchema): + metric_type: Literal[MetricType.errors] + metric_of: MetricOfErrors = Field(default=MetricOfErrors.impacted_sessions_by_js_errors) + view_type: MetricOtherViewType = Field(...) + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["series"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfErrors(values.metric_of) + return values + + +class CardPerformance(__CardSchema): + metric_type: Literal[MetricType.performance] + metric_of: MetricOfPerformance = Field(default=MetricOfPerformance.cpu) + view_type: MetricOtherViewType = Field(...) 
+ + @model_validator(mode="before") + def __enforce_default(cls, values): + values["series"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfPerformance(values.metric_of) + return values + + +class CardResources(__CardSchema): + metric_type: Literal[MetricType.resources] + metric_of: MetricOfResources = Field(default=MetricOfResources.missing_resources) + view_type: MetricOtherViewType = Field(...) + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["series"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfResources(values.metric_of) + return values + + +class CardWebVital(__CardSchema): + metric_type: Literal[MetricType.web_vital] + metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.avg_cpu) + view_type: MetricOtherViewType = Field(...) + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["series"] = [] + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfWebVitals(values.metric_of) + return values + + +class CardClickMap(__CardSchema): + metric_type: Literal[MetricType.click_map] + metric_of: MetricOfClickMap = Field(default=MetricOfClickMap.click_map_url) + view_type: MetricOtherViewType = Field(...) + + @model_validator(mode="before") + def __enforce_default(cls, values): + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOfClickMap(values.metric_of) + return values + + +class MetricOfInsights(str, Enum): + issue_categories = "issueCategories" + + +class CardInsights(__CardSchema): + metric_type: Literal[MetricType.insights] + metric_of: MetricOfInsights = Field(default=MetricOfInsights.issue_categories) + view_type: MetricOtherViewType = Field(...) 
+ + @model_validator(mode="before") + def __enforce_default(cls, values): + values["view_type"] = MetricOtherViewType.list_chart + return values + + @model_validator(mode="after") + def __transform(cls, values): + values.metric_of = MetricOtherViewType(values.metric_of) + return values + + @model_validator(mode='after') + def restrictions(cls, values): + raise ValueError(f"metricType:{MetricType.insights} not supported yet.") + + +# class CardPathAnalysisSchema(BaseModel): +class CardPathAnalysisSchema(CardSessionsSchema): + filter: PathAnalysisSchema = Field(...) + density: int = Field(default=4, ge=2, le=10) + + @model_validator(mode="before") + def __enforce_default(cls, values): + if values.get("filter") is None and values.get("startTimestamp") and values.get("endTimestamp"): + values["filter"] = PathAnalysisSchema(startTimestamp=values["startTimestamp"], + endTimestamp=values["endTimestamp"], + density=values["density"]) + return values + + +class CardPathAnalysis(__CardSchema): + metric_type: Literal[MetricType.pathAnalysis] + metric_of: MetricOfPathAnalysis = Field(default=MetricOfPathAnalysis.session_count) + view_type: MetricOtherViewType = Field(...) 
+ metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[ProductAnalyticsSelectedEventType.location]) + density: int = Field(default=4, ge=2, le=10) + + # TODO: testing + series: List[CardPathAnalysisSchema] = Field(default=[]) + + @model_validator(mode="before") + def __enforce_default(cls, values): + values["viewType"] = MetricOtherViewType.other_chart.value + if values.get("series") is not None and len(values["series"]) > 0: + values["series"] = [values["series"][0]] + return values + + @model_validator(mode="after") + def __enforce_metric_value(cls, values): + metric_value = [] + for s in values.series: + for f in s.filter.filters: + if f.type in (ProductAnalyticsFilterType.start_point, ProductAnalyticsFilterType.end_point): + for ff in f.filters: + metric_value.append(ff.type) + + if len(metric_value) > 0: + metric_value = remove_duplicate_values(metric_value) + values.metric_value = metric_value + + return values + + @model_validator(mode="after") + def __transform(cls, values): + # values.metric_of = MetricOfClickMap(values.metric_of) + return values + + +# Union of cards-schemas that doesn't change between FOSS and EE +__cards_union_base = Union[ + CardTimeSeries, CardTable, CardFunnel, + CardErrors, CardPerformance, CardResources, + CardWebVital, CardClickMap, + CardPathAnalysis] +CardSchema = ORUnion(Union[__cards_union_base, CardInsights], discriminator='metric_type') + + +# class UpdateCustomMetricsStatusSchema(BaseModel): +class UpdateCardStatusSchema(BaseModel): + active: bool = Field(...) + + +class SavedSearchSchema(BaseModel): + name: str = Field(...) 
+ is_public: bool = Field(default=False) + filter: SessionsSearchPayloadSchema = Field([]) + + +class CreateDashboardSchema(BaseModel): + name: str = Field(..., min_length=1) + description: Optional[str] = Field(default='') + is_public: bool = Field(default=False) + is_pinned: bool = Field(default=False) + metrics: Optional[List[int]] = Field(default=[]) + + +class EditDashboardSchema(CreateDashboardSchema): + is_public: Optional[bool] = Field(default=None) + is_pinned: Optional[bool] = Field(default=None) + + +class UpdateWidgetPayloadSchema(BaseModel): + config: dict = Field(default={}) + + +class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema): + metric_id: int = Field(...) + + +class TemplatePredefinedUnits(str, Enum): + millisecond = "ms" + second = "s" + minute = "min" + memory = "mb" + frame = "f/s" + percentage = "%" + count = "count" + + +class LiveFilterType(str, Enum): + user_os = FilterType.user_os.value + user_browser = FilterType.user_browser.value + user_device = FilterType.user_device.value + user_country = FilterType.user_country.value + user_id = FilterType.user_id.value + user_anonymous_id = FilterType.user_anonymous_id.value + rev_id = FilterType.rev_id.value + platform = FilterType.platform.value + page_title = "pageTitle" + session_id = "sessionId" + metadata = FilterType.metadata.value + user_UUID = "userUuid" + tracker_version = "trackerVersion" + user_browser_version = "userBrowserVersion" + user_device_type = "userDeviceType" + + +class LiveSessionSearchFilterSchema(BaseModel): + value: Union[List[str], str] = Field(...) + type: LiveFilterType = Field(...) 
+ source: Optional[str] = Field(default=None) + operator: Literal[SearchEventOperator._is, \ + SearchEventOperator._contains] = Field(default=SearchEventOperator._contains) + + transform = model_validator(mode='before')(transform_old_filter_type) + + @model_validator(mode='after') + def __validator(cls, values): + if values.type is not None and values.type == LiveFilterType.metadata: + assert values.source is not None, "source should not be null for METADATA type" + assert len(values.source) > 0, "source should not be empty for METADATA type" + return values + + +class LiveSessionsSearchPayloadSchema(_PaginatedSchema): + filters: List[LiveSessionSearchFilterSchema] = Field([]) + sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP") + order: SortOrderType = Field(default=SortOrderType.desc) + + @model_validator(mode="before") + def __transform(cls, values): + if values.get("order") is not None: + values["order"] = values["order"].upper() + if values.get("filters") is not None: + i = 0 + while i < len(values["filters"]): + if values["filters"][i]["value"] is None or len(values["filters"][i]["value"]) == 0: + del values["filters"][i] + else: + i += 1 + for i in values["filters"]: + if i.get("type") == LiveFilterType.platform: + i["type"] = LiveFilterType.user_device_type + if values.get("sort") is not None: + if values["sort"].lower() == "startts": + values["sort"] = "TIMESTAMP" + return values + + +class IntegrationType(str, Enum): + github = "GITHUB" + jira = "JIRA" + slack = "SLACK" + ms_teams = "MSTEAMS" + sentry = "SENTRY" + bugsnag = "BUGSNAG" + rollbar = "ROLLBAR" + elasticsearch = "ELASTICSEARCH" + datadog = "DATADOG" + sumologic = "SUMOLOGIC" + stackdriver = "STACKDRIVER" + cloudwatch = "CLOUDWATCH" + newrelic = "NEWRELIC" + + +class SearchNoteSchema(_PaginatedSchema): + sort: str = Field(default="createdAt") + order: SortOrderType = Field(default=SortOrderType.desc) + tags: Optional[List[str]] = Field(default=[]) + shared_only: bool = 
Field(default=False) + mine_only: bool = Field(default=False) + + +class SessionNoteSchema(BaseModel): + message: str = Field(..., min_length=2) + tag: Optional[str] = Field(default=None) + timestamp: int = Field(default=-1) + is_public: bool = Field(default=False) + + +class SessionUpdateNoteSchema(SessionNoteSchema): + message: Optional[str] = Field(default=None, min_length=2) + timestamp: Optional[int] = Field(default=None, ge=-1) + is_public: Optional[bool] = Field(default=None) + + @model_validator(mode='after') + def __validator(cls, values): + assert len(values.keys()) > 0, "at least 1 attribute should be provided for update" + c = 0 + for v in values.values(): + if v is not None and (not isinstance(v, str) or len(v) > 0): + c += 1 + break + assert c > 0, "at least 1 value should be provided for update" + return values + + +class WebhookType(str, Enum): + webhook = "webhook" + slack = "slack" + email = "email" + msteams = "msteams" + + +class SearchCardsSchema(_PaginatedSchema): + order: SortOrderType = Field(default=SortOrderType.desc) + shared_only: bool = Field(default=False) + mine_only: bool = Field(default=False) + query: Optional[str] = Field(default=None) + + +class _ClickMapSearchEventRaw(SessionSearchEventSchema2): + type: Literal[EventType.location] = Field(...) 
+ + +# class FlatClickMapSessionsSearch(SessionsSearchPayloadSchema): +class ClickMapSessionsSearch(SessionsSearchPayloadSchema): + events: Optional[List[_ClickMapSearchEventRaw]] = Field(default=[]) + filters: List[Union[SessionSearchFilterSchema, _ClickMapSearchEventRaw]] = Field(default=[]) + + @model_validator(mode="before") + def __transform(cls, values): + for f in values.get("filters", []): + if f.get("type") == FilterType.duration: + return values + values["filters"] = values.get("filters", []) + values["filters"].append({"value": [5000], "type": FilterType.duration, + "operator": SearchEventOperator._is, "filters": []}) + return values + + # @model_validator(mode='after') + # def flat_to_original(cls, values): + # if len(values.events) > 0: + # return values + # n_filters = [] + # n_events = [] + # for v in values.filters: + # if isinstance(v, _ClickMapSearchEventRaw): + # n_events.append(v) + # else: + # n_filters.append(v) + # values.events = n_events + # values.filters = n_filters + # return values + + +class IssueFilterType(str, Enum): + _selector = "CLICK_SELECTOR" + + +class IssueAdvancedFilter(BaseModel): + type: IssueFilterType = Field(default=IssueFilterType._selector) + value: List[str] = Field(default=[]) + operator: SearchEventOperator = Field(default=SearchEventOperator._is) + + +class ClickMapFilterSchema(BaseModel): + value: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[]) + type: Literal[FilterType.issue] = Field(...) + operator: Literal[SearchEventOperator._is, MathOperator._equal] = Field(...) + # source: Optional[Union[ErrorSource, str]] = Field(default=None) + filters: List[IssueAdvancedFilter] = Field(default=[]) + + +class GetHeatmapPayloadSchema(_TimedSchema): + startTimestamp: int = Field(default=TimeUTC.now(delta_days=-30)) + url: str = Field(...) 
+ # issues: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[]) + filters: List[ClickMapFilterSchema] = Field(default=[]) + click_rage: bool = Field(default=False) + + +class FeatureFlagVariant(BaseModel): + variant_id: Optional[int] = Field(default=None) + value: str = Field(...) + description: Optional[str] = Field(default=None) + payload: Optional[str] = Field(default=None) + rollout_percentage: Optional[int] = Field(default=0, ge=0, le=100) + + +class FeatureFlagConditionFilterSchema(BaseModel): + is_event: Literal[False] = False + type: FilterType = Field(...) + value: List[str] = Field(default=[], min_length=1) + operator: Union[SearchEventOperator, MathOperator] = Field(...) + + +class FeatureFlagCondition(BaseModel): + condition_id: Optional[int] = Field(default=None) + name: str = Field(...) + rollout_percentage: Optional[int] = Field(default=0) + filters: List[FeatureFlagConditionFilterSchema] = Field(default=[]) + + +class SearchFlagsSchema(_PaginatedSchema): + limit: int = Field(default=15, gt=0, le=200) + user_id: Optional[int] = Field(default=None) + order: SortOrderType = Field(default=SortOrderType.desc) + query: Optional[str] = Field(default=None) + is_active: Optional[bool] = Field(default=None) + + +class FeatureFlagType(str, Enum): + single_variant = "single" + multi_variant = "multi" + + +class FeatureFlagStatus(BaseModel): + is_active: bool = Field(...) + + +class ModuleStatus(BaseModel): + module: str = Field(..., description="Possible values: notes, bugs, live", + regex="^(assist|notes|bug-reports|offline-recordings|alerts)$") + status: bool = Field(...) 
+ + class Config: + alias_generator = attribute_to_camel_case + + +class FeatureFlagSchema(BaseModel): + payload: Optional[str] = Field(default=None) + flag_key: str = Field(..., pattern=r'^[a-zA-Z0-9\-]+$') + description: Optional[str] = Field(default=None) + flag_type: FeatureFlagType = Field(default=FeatureFlagType.single_variant) + is_persist: Optional[bool] = Field(default=False) + is_active: Optional[bool] = Field(default=True) + conditions: List[FeatureFlagCondition] = Field(default=[]) + variants: List[FeatureFlagVariant] = Field(default=[]) diff --git a/assist/package.json b/assist/package.json index 7a0026de7..621ea9e33 100644 --- a/assist/package.json +++ b/assist/package.json @@ -18,10 +18,10 @@ }, "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { - "@maxmind/geoip2-node": "^3.5.0", + "@maxmind/geoip2-node": "^4.2.0", "express": "^4.18.2", - "jsonwebtoken": "^9.0.0", - "socket.io": "^4.6.1", + "jsonwebtoken": "^9.0.1", + "socket.io": "^4.7.2", "ua-parser-js": "^1.0.35" } } diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 98b09f866..154fbd07a 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -260,7 +260,6 @@ Pipfile.lock /Dockerfile_bundle /entrypoint.bundle.sh /chalicelib/core/heatmaps.py -/schemas.py #exp /chalicelib/core/custom_metrics.py /chalicelib/core/performance_event.py /chalicelib/core/saved_search.py @@ -270,4 +269,6 @@ Pipfile.lock /run-dev.sh /run-alerts-dev.sh /routers/subs/v1_api.py -#exp /chalicelib/core/dashboards.py \ No newline at end of file +#exp /chalicelib/core/dashboards.py +/schemas/overrides.py +/schemas/schemas.py diff --git a/ee/api/Pipfile b/ee/api/Pipfile index 9ca30c1c5..c6ce15f28 100644 --- a/ee/api/Pipfile +++ b/ee/api/Pipfile @@ -4,22 +4,24 @@ verify_ssl = true name = "pypi" [packages] -requests = "==2.31.0" urllib3 = "==1.26.16" -boto3 = "==1.26.148" -pyjwt = "==2.7.0" -psycopg2-binary = "==2.9.6" -elasticsearch = "==8.8.0" -jira = "==3.5.1" -fastapi = "==0.96.0" -uvicorn = 
{version = "==0.22.0", extras = ["standard"]} +requests = "==2.31.0" +boto3 = "==1.28.40" +pyjwt = "==2.8.0" +psycopg2-binary = "==2.9.7" +elasticsearch = "==8.9.0" +jira = "==3.5.2" +fastapi = "==0.103.1" +gunicorn = "==21.2.0" python-decouple = "==3.8" -pydantic = {version = "==1.10.8", extras = ["email"]} -apscheduler = "==3.10.1" -clickhouse-driver = {version = "==0.2.5", extras = ["lz4"]} +apscheduler = "==3.10.4" +python3-saml = "==1.15.0" python-multipart = "==0.0.6" -redis = "==4.5.5" -azure-storage-blob = "==12.16.0" +redis = "==5.0.0" +azure-storage-blob = "==12.17.0" +uvicorn = {version = "==0.23.2", extras = ["standard"]} +pydantic = {version = "==2.3.0", extras = ["email"]} +clickhouse-driver = {version = "==0.2.6", extras = ["lz4"]} [dev-packages] diff --git a/ee/api/app.py b/ee/api/app.py index 6390037a7..6ca8f2cfd 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -14,7 +14,11 @@ from chalicelib.core import traces from chalicelib.utils import events_queue from chalicelib.utils import helper from chalicelib.utils import pg_client -from routers import core, core_dynamic, ee, saml +from routers import core, core_dynamic +from routers import ee + +if config("ENABLE_SSO", cast=bool, default=True): + from routers import saml from crons import core_crons, ee_crons, core_dynamic_crons from routers.subs import insights, metrics, v1_api_ee from routers.subs import v1_api, health @@ -97,9 +101,6 @@ app.include_router(core_dynamic.app_apikey) app.include_router(ee.public_app) app.include_router(ee.app) app.include_router(ee.app_apikey) -app.include_router(saml.public_app) -app.include_router(saml.app) -app.include_router(saml.app_apikey) app.include_router(metrics.app) app.include_router(insights.app) app.include_router(v1_api.app_apikey) @@ -107,3 +108,8 @@ app.include_router(v1_api_ee.app_apikey) app.include_router(health.public_app) app.include_router(health.app) app.include_router(health.app_apikey) + +if config("ENABLE_SSO", cast=bool, default=True): + 
app.include_router(saml.public_app) + app.include_router(saml.app) + app.include_router(saml.app_apikey) diff --git a/ee/api/auth/auth_jwt.py b/ee/api/auth/auth_jwt.py index 477beba3d..f92a80480 100644 --- a/ee/api/auth/auth_jwt.py +++ b/ee/api/auth/auth_jwt.py @@ -6,19 +6,19 @@ from starlette import status from starlette.exceptions import HTTPException from chalicelib.core import authorizers, users -import schemas_ee +import schemas class JWTAuth(HTTPBearer): def __init__(self, auto_error: bool = True): super(JWTAuth, self).__init__(auto_error=auto_error) - async def __call__(self, request: Request) -> Optional[schemas_ee.CurrentContext]: + async def __call__(self, request: Request) -> Optional[schemas.CurrentContext]: credentials: HTTPAuthorizationCredentials = await super(JWTAuth, self).__call__(request) if credentials: if not credentials.scheme == "Bearer": raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid authentication scheme.") - jwt_payload = authorizers.jwt_authorizer(credentials.scheme + " " + credentials.credentials) + jwt_payload = authorizers.jwt_authorizer(scheme=credentials.scheme, token=credentials.credentials) auth_exists = jwt_payload is not None \ and users.auth_exists(user_id=jwt_payload.get("userId", -1), tenant_id=jwt_payload.get("tenantId", -1), @@ -27,18 +27,13 @@ class JWTAuth(HTTPBearer): if jwt_payload is None \ or jwt_payload.get("iat") is None or jwt_payload.get("aud") is None \ or not auth_exists: - print("JWTAuth: Token issue") if jwt_payload is not None: print(jwt_payload) - print(f"JWTAuth: user_id={jwt_payload.get('userId')} tenant_id={jwt_payload.get('tenantId')}") - if jwt_payload is None: - print("JWTAuth: jwt_payload is None") - print(credentials.scheme + " " + credentials.credentials) - if jwt_payload is not None and jwt_payload.get("iat") is None: - print("JWTAuth: iat is None") - if jwt_payload is not None and jwt_payload.get("aud") is None: - print("JWTAuth: aud is None") - if jwt_payload is not 
None and not auth_exists: + if jwt_payload.get("iat") is None: + print("JWTAuth: iat is None") + if jwt_payload.get("aud") is None: + print("JWTAuth: aud is None") + if not auth_exists: print("JWTAuth: not users.auth_exists") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid token or expired token.") @@ -47,12 +42,14 @@ class JWTAuth(HTTPBearer): print("JWTAuth: User not found.") raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="User not found.") jwt_payload["authorizer_identity"] = "jwt" - print(jwt_payload) request.state.authorizer_identity = "jwt" - request.state.currentContext = schemas_ee.CurrentContext(tenant_id=jwt_payload.get("tenantId", -1), - user_id=jwt_payload.get("userId", -1), - email=user["email"], - permissions=user["permissions"]) + if user["serviceAccount"]: + user["permissions"] = [p.value for p in schemas_ee.ServicePermissions] + request.state.currentContext = schemas.CurrentContext(tenantId=jwt_payload.get("tenantId", -1), + userId=jwt_payload.get("userId", -1), + email=user["email"], + permissions=user["permissions"], + serviceAccount=user["serviceAccount"]) return request.state.currentContext else: diff --git a/ee/api/chalicelib/core/assist_records.py b/ee/api/chalicelib/core/assist_records.py index b06a282b8..62f753e09 100644 --- a/ee/api/chalicelib/core/assist_records.py +++ b/ee/api/chalicelib/core/assist_records.py @@ -3,7 +3,7 @@ import hashlib from decouple import config import schemas -import schemas_ee +import schemas from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.storage import StorageClient @@ -14,16 +14,16 @@ def generate_file_key(project_id, key): return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}" -def presign_record(project_id, data: schemas_ee.AssistRecordPayloadSchema, context: schemas_ee.CurrentContext): +def presign_record(project_id, data: schemas.AssistRecordPayloadSchema, context: 
schemas.CurrentContext): key = generate_file_key(project_id=project_id, key=f"{TimeUTC.now()}-{data.name}") presigned_url = StorageClient.get_presigned_url_for_upload(bucket=config('ASSIST_RECORDS_BUCKET'), expires_in=1800, key=key) return {"URL": presigned_url, "key": key} -def save_record(project_id, data: schemas_ee.AssistRecordSavePayloadSchema, context: schemas_ee.CurrentContext): +def save_record(project_id, data: schemas.AssistRecordSavePayloadSchema, context: schemas.CurrentContext): extra.tag_record(file_key=data.key, tag_value=config('RETENTION_L_VALUE', default='vault')) - params = {"user_id": context.user_id, "project_id": project_id, **data.dict()} + params = {"user_id": context.user_id, "project_id": project_id, **data.model_dump()} with pg_client.PostgresClient() as cur: query = cur.mogrify( f"""INSERT INTO assist_records(project_id, user_id, name, file_key, duration, session_id) @@ -40,8 +40,8 @@ def save_record(project_id, data: schemas_ee.AssistRecordSavePayloadSchema, cont return result -def search_records(project_id: int, data: schemas_ee.AssistRecordSearchPayloadSchema, - context: schemas_ee.CurrentContext): +def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSchema, + context: schemas.CurrentContext): conditions = ["projects.tenant_id=%(tenant_id)s", "projects.deleted_at ISNULL", "projects.project_id=%(project_id)s", @@ -54,7 +54,7 @@ def search_records(project_id: int, data: schemas_ee.AssistRecordSearchPayloadSc params = {"tenant_id": context.tenant_id, "project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp, "p_start": (data.page - 1) * data.limit, "p_limit": data.limit, - **data.dict()} + **data.model_dump()} if data.user_id is not None: conditions.append("assist_records.user_id=%(user_id)s") if data.query is not None and len(data.query) > 0: @@ -85,7 +85,7 @@ def search_records(project_id: int, data: schemas_ee.AssistRecordSearchPayloadSc return results -def get_record(project_id, 
record_id, context: schemas_ee.CurrentContext): +def get_record(project_id, record_id, context: schemas.CurrentContext): conditions = ["projects.tenant_id=%(tenant_id)s", "projects.deleted_at ISNULL", "assist_records.record_id=%(record_id)s", @@ -110,8 +110,8 @@ def get_record(project_id, record_id, context: schemas_ee.CurrentContext): return result -def update_record(project_id, record_id, data: schemas_ee.AssistRecordUpdatePayloadSchema, - context: schemas_ee.CurrentContext): +def update_record(project_id, record_id, data: schemas.AssistRecordUpdatePayloadSchema, + context: schemas.CurrentContext): conditions = ["assist_records.record_id=%(record_id)s", "assist_records.deleted_at ISNULL"] params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id, "name": data.name} with pg_client.PostgresClient() as cur: @@ -136,7 +136,7 @@ def update_record(project_id, record_id, data: schemas_ee.AssistRecordUpdatePayl return result -def delete_record(project_id, record_id, context: schemas_ee.CurrentContext): +def delete_record(project_id, record_id, context: schemas.CurrentContext): conditions = ["assist_records.record_id=%(record_id)s"] params = {"tenant_id": context.tenant_id, "project_id": project_id, "record_id": record_id} with pg_client.PostgresClient() as cur: diff --git a/ee/api/chalicelib/core/authorizers.py b/ee/api/chalicelib/core/authorizers.py index 21a6634bd..20d102209 100644 --- a/ee/api/chalicelib/core/authorizers.py +++ b/ee/api/chalicelib/core/authorizers.py @@ -7,13 +7,12 @@ from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC -def jwt_authorizer(token): - token = token.split(" ") - if len(token) != 2 or token[0].lower() != "bearer": +def jwt_authorizer(scheme: str, token: str): + if scheme.lower() != "bearer": return None try: payload = jwt.decode( - token[1], + token, config("jwt_secret"), algorithms=config("jwt_algorithm"), audience=[f"front:{helper.get_stage_name()}"] @@ -23,6 +22,7 @@ def 
jwt_authorizer(token): return None except BaseException as e: print("! JWT Base Exception") + print(e) return None return payload diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py index a346554b3..7862ddd1a 100644 --- a/ee/api/chalicelib/core/custom_metrics.py +++ b/ee/api/chalicelib/core/custom_metrics.py @@ -5,8 +5,8 @@ from decouple import config from fastapi import HTTPException, status import schemas -import schemas_ee -from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite +from chalicelib.core import funnels, issues, metrics, click_maps, sessions_insights, sessions_mobs, sessions_favorite, \ + product_analytics from chalicelib.utils import helper, pg_client from chalicelib.utils.TimeUTC import TimeUTC from chalicelib.utils.storage import StorageClient, extra @@ -25,25 +25,24 @@ else: PIE_CHART_GROUP = 5 -def __try_live(project_id, data: schemas_ee.CardSchema): +# TODO: refactor this to split +# timeseries / +# table of errors / table of issues / table of browsers / table of devices / table of countries / table of URLs +def __try_live(project_id, data: schemas.CardSchema): results = [] for i, s in enumerate(data.series): - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp results.append(sessions.search2_series(data=s.filter, project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value)) if data.view_type == schemas.MetricTimeseriesViewType.progress: r = {"count": results[-1]} - diff = s.filter.endDate - s.filter.startDate - s.filter.endDate = s.filter.startDate - s.filter.startDate = s.filter.endDate - diff + diff = s.filter.endTimestamp - s.filter.startTimestamp + s.filter.endTimestamp = s.filter.startTimestamp + s.filter.startTimestamp = s.filter.endTimestamp - diff r["previousCount"] = sessions.search2_series(data=s.filter, 
project_id=project_id, density=data.density, view_type=data.view_type, metric_type=data.metric_type, metric_of=data.metric_of, metric_value=data.metric_value) r["countProgress"] = helper.__progress(old_val=r["previousCount"], new_val=r["count"]) - # r["countProgress"] = ((r["count"] - r["previousCount"]) / r["previousCount"]) * 100 \ - # if r["previousCount"] > 0 else 0 r["seriesName"] = s.name if s.name else i + 1 r["seriesId"] = s.series_id if s.series_id else None results[-1] = r @@ -58,108 +57,97 @@ def __try_live(project_id, data: schemas_ee.CardSchema): return results -def __is_funnel_chart(data: schemas_ee.CardSchema): +def __is_funnel_chart(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.funnel -def __get_funnel_chart(project_id, data: schemas_ee.CardSchema): +def __get_funnel_chart(project_id: int, data: schemas.CardFunnel, user_id: int = None): if len(data.series) == 0: return { "stages": [], "totalDropDueToIssues": 0 } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp return funnels.get_top_insights_on_the_fly_widget(project_id=project_id, data=data.series[0].filter) -def __is_errors_list(data: schemas_ee.CardSchema): +def __is_errors_list(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.table \ and data.metric_of == schemas.MetricOfTable.errors -def __get_errors_list(project_id, user_id, data: schemas_ee.CardSchema): +def __get_errors_list(project_id, user_id, data: schemas.CardSchema): if len(data.series) == 0: return { "total": 0, "errors": [] } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp - data.series[0].filter.page = data.page - data.series[0].filter.limit = data.limit return errors.search(data.series[0].filter, project_id=project_id, user_id=user_id) -def __is_sessions_list(data: schemas_ee.CardSchema): +def __is_sessions_list(data: schemas.CardSchema): return data.metric_type == 
schemas.MetricType.table \ and data.metric_of == schemas.MetricOfTable.sessions -def __get_sessions_list(project_id, user_id, data: schemas_ee.CardSchema): +def __get_sessions_list(project_id, user_id, data: schemas.CardSchema): if len(data.series) == 0: print("empty series") return { "total": 0, "sessions": [] } - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp - data.series[0].filter.page = data.page - data.series[0].filter.limit = data.limit return sessions.search_sessions(data=data.series[0].filter, project_id=project_id, user_id=user_id) -def __is_predefined(data: schemas_ee.CardSchema): +def __is_predefined(data: schemas.CardSchema): return data.is_template -def __is_click_map(data: schemas_ee.CardSchema): +def __is_click_map(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.click_map -def __get_click_map_chart(project_id, user_id, data: schemas_ee.CardSchema, include_mobs: bool = True): +def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, include_mobs: bool = True): if len(data.series) == 0: return None - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp return click_maps.search_short_session(project_id=project_id, user_id=user_id, - data=schemas.FlatClickMapSessionsSearch(**data.series[0].filter.dict()), + data=schemas.ClickMapSessionsSearch( + **data.series[0].filter.model_dump()), include_mobs=include_mobs) # EE only -def __is_insights(data: schemas_ee.CardSchema): +def __is_insights(data: schemas.CardSchema): return data.metric_type == schemas.MetricType.insights # EE only -def __get_insights_chart(project_id, user_id, data: schemas_ee.CardSchema): +def __get_insights_chart(project_id: int, data: schemas.CardInsights, user_id: int = None): return sessions_insights.fetch_selected(project_id=project_id, - data=schemas_ee.GetInsightsSchema(startTimestamp=data.startTimestamp, - 
endTimestamp=data.endTimestamp, - metricValue=data.metric_value, - series=data.series)) + data=schemas.GetInsightsSchema(startTimestamp=data.startTimestamp, + endTimestamp=data.endTimestamp, + metricValue=data.metric_value, + series=data.series)) -def merged_live(project_id, data: schemas_ee.CardSchema, user_id=None): - if data.is_template: - return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.dict()) - elif __is_funnel_chart(data): - return __get_funnel_chart(project_id=project_id, data=data) - elif __is_errors_list(data): - return __get_errors_list(project_id=project_id, user_id=user_id, data=data) - elif __is_sessions_list(data): - return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) - elif __is_click_map(data): - return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data) - # EE only - elif __is_insights(data): - return __get_insights_chart(project_id=project_id, user_id=user_id, data=data) - elif len(data.series) == 0: - return [] +def __get_path_analysis_chart(project_id: int, user_id: int, data: schemas.CardPathAnalysis): + if len(data.series) == 0: + data.series.append( + schemas.CardPathAnalysisSchema(startTimestamp=data.startTimestamp, endTimestamp=data.endTimestamp)) + elif not isinstance(data.series[0].filter, schemas.PathAnalysisSchema): + data.series[0].filter = schemas.PathAnalysisSchema() + + return product_analytics.path_analysis(project_id=project_id, data=data.series[0].filter, density=data.density, + selected_event_type=data.metric_value, hide_minor_paths=data.hide_excess) + + +def __is_path_analysis(data: schemas.CardSchema): + return data.metric_type == schemas.MetricType.pathAnalysis + + +def __get_timeseries_chart(project_id: int, data: schemas.CardTimeSeries, user_id: int = None): series_charts = __try_live(project_id=project_id, data=data) - if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: + if 
data.view_type == schemas.MetricTimeseriesViewType.progress: return series_charts results = [{}] * len(series_charts[0]) for i in range(len(results)): @@ -169,29 +157,137 @@ def merged_live(project_id, data: schemas_ee.CardSchema, user_id=None): return results -def __merge_metric_with_data(metric: schemas_ee.CardSchema, - data: schemas.CardChartSchema) -> schemas_ee.CardSchema: +def empty(**args): + raise Exception("not supported") + + +def __get_table_of_user_ids(project_id: int, data: schemas.CardTable, user_id: int = None): + series_charts = __try_live(project_id=project_id, data=data) + return series_charts + + +def __get_table_of_sessions(project_id: int, data: schemas.CardTable, user_id): + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) + + +def __get_table_of_errors(project_id: int, data: schemas.CardTable, user_id: int): + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + + +def __get_table_of_issues(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_browsers(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_devises(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_countries(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None): + return __try_live(project_id=project_id, data=data) + + +def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int): + supported = { + schemas.MetricOfTable.sessions: __get_table_of_sessions, + schemas.MetricOfTable.errors: __get_table_of_errors, + schemas.MetricOfTable.user_id: __get_table_of_user_ids, + 
schemas.MetricOfTable.issues: __get_table_of_issues, + schemas.MetricOfTable.user_browser: __get_table_of_browsers, + schemas.MetricOfTable.user_device: __get_table_of_devises, + schemas.MetricOfTable.user_country: __get_table_of_countries, + schemas.MetricOfTable.visited_url: __get_table_of_urls, + } + return supported.get(data.metric_of, empty)(project_id=project_id, data=data, user_id=user_id) + + +def get_chart(project_id: int, data: schemas.CardSchema, user_id: int): + if data.is_template: + return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump()) + + supported = { + schemas.MetricType.timeseries: __get_timeseries_chart, + schemas.MetricType.table: __get_table_chart, + schemas.MetricType.click_map: __get_click_map_chart, + schemas.MetricType.funnel: __get_funnel_chart, + schemas.MetricType.insights: __get_insights_chart, + schemas.MetricType.pathAnalysis: __get_path_analysis_chart + } + return supported.get(data.metric_type, empty)(project_id=project_id, data=data, user_id=user_id) + + +def merged_live(project_id, data: schemas.CardSchema, user_id=None): + return get_chart(project_id=project_id, data=data, user_id=user_id) + print("---1") + if data.is_template: + print("---2") + return get_predefined_metric(key=data.metric_of, project_id=project_id, data=data.model_dump()) + elif __is_funnel_chart(data): + print("---3") + return __get_funnel_chart(project_id=project_id, data=data) + elif __is_errors_list(data): + print("---4") + return __get_errors_list(project_id=project_id, user_id=user_id, data=data) + elif __is_sessions_list(data): + print("---5") + return __get_sessions_list(project_id=project_id, user_id=user_id, data=data) + elif __is_click_map(data): + print("---6") + return __get_click_map_chart(project_id=project_id, user_id=user_id, data=data) + # EE only + elif __is_insights(data): + return __get_insights_chart(project_id=project_id, user_id=user_id, data=data) + elif __is_path_analysis(data): + print("---7") 
+ return __get_path_analysis_chart(project_id=project_id, data=data) + elif len(data.series) == 0: + print("---8") + return [] + series_charts = __try_live(project_id=project_id, data=data) + print("---9") + if data.view_type == schemas.MetricTimeseriesViewType.progress or data.metric_type == schemas.MetricType.table: + print("---10") + return series_charts + results = [{}] * len(series_charts[0]) + print("---11") + for i in range(len(results)): + for j, series_chart in enumerate(series_charts): + results[i] = {**results[i], "timestamp": series_chart[i]["timestamp"], + data.series[j].name if data.series[j].name else j + 1: series_chart[i]["count"]} + return results + + +def __merge_metric_with_data(metric: schemas.CardSchema, + data: schemas.CardSessionsSchema) -> schemas.CardSchema: if data.series is not None and len(data.series) > 0: metric.series = data.series - metric: schemas_ee.CardSchema = schemas_ee.CardSchema( - **{**data.dict(by_alias=True), **metric.dict(by_alias=True)}) + # TODO: try to refactor this + metric: schemas.CardSchema = schemas.CardSchema(**{**data.model_dump(by_alias=True), + **metric.model_dump(by_alias=True)}) if len(data.filters) > 0 or len(data.events) > 0: for s in metric.series: if len(data.filters) > 0: s.filter.filters += data.filters if len(data.events) > 0: s.filter.events += data.events - metric.limit = data.limit - metric.page = data.page - metric.startTimestamp = data.startTimestamp - metric.endTimestamp = data.endTimestamp + # metric.limit = data.limit + # metric.page = data.page + # metric.startTimestamp = data.startTimestamp + # metric.endTimestamp = data.endTimestamp return metric -def make_chart(project_id, user_id, data: schemas.CardChartSchema, metric: schemas_ee.CardSchema): +def make_chart(project_id, user_id, data: schemas.CardSessionsSchema, metric: schemas.CardSchema): if metric is None: return None - metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data) + metric: schemas.CardSchema = 
__merge_metric_with_data(metric=metric, data=data) return merged_live(project_id=project_id, data=metric, user_id=user_id) @@ -201,8 +297,8 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if raw_metric is None: return None - metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric) - metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data) + metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) + metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None results = [] @@ -210,10 +306,10 @@ def get_sessions(project_id, user_id, metric_id, data: schemas.CardSessionsSchem # if __is_click_map(metric) and raw_metric.get("data") is not None: # is_click_map = True for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page + # s.filter.startTimestamp = data.startTimestamp + # s.filter.endTimestamp = data.endTimestamp + # s.filter.limit = data.limit + # s.filter.page = data.page # if is_click_map: # results.append( # {"seriesId": s.series_id, "seriesName": s.name, "total": 1, "sessions": [raw_metric["data"]]}) @@ -228,15 +324,11 @@ def get_funnel_issues(project_id, user_id, metric_id, data: schemas.CardSessions raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if raw_metric is None: return None - metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric) - metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data) + metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) + metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None for s in metric.series: - s.filter.startDate = data.startTimestamp - 
s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page return {"seriesId": s.series_id, "seriesName": s.name, **funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter)} @@ -245,28 +337,20 @@ def get_errors_list(project_id, user_id, metric_id, data: schemas.CardSessionsSc raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if raw_metric is None: return None - metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric) - metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data) + metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) + metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page return {"seriesId": s.series_id, "seriesName": s.name, **errors.search(data=s.filter, project_id=project_id, user_id=user_id)} def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema): results = [] - if data.series is None: + if len(data.series) == 0: return results for s in data.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp - s.filter.limit = data.limit - s.filter.page = data.page if len(data.filters) > 0: s.filter.filters += data.filters if len(data.events) > 0: @@ -277,7 +361,7 @@ def try_sessions(project_id, user_id, data: schemas.CardSessionsSchema): return results -def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False): +def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False): with pg_client.PostgresClient() as cur: session_data = None if __is_click_map(data): @@ -299,13 +383,13 @@ def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False): session_data = json.dumps(session_data) _data = 
{"session_data": session_data} for i, s in enumerate(data.series): - for k in s.dict().keys(): + for k in s.model_dump().keys(): _data[f"{k}_{i}"] = s.__getattribute__(k) _data[f"index_{i}"] = i _data[f"filter_{i}"] = s.filter.json() series_len = len(data.series) - params = {"user_id": user_id, "project_id": project_id, **data.dict(), **_data} - params["default_config"] = json.dumps(data.default_config.dict()) + params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data} + params["default_config"] = json.dumps(data.default_config.model_dump()) query = """INSERT INTO metrics (project_id, user_id, name, is_public, view_type, metric_type, metric_of, metric_value, metric_format, default_config, thumbnail, data) @@ -331,7 +415,7 @@ def create(project_id, user_id, data: schemas_ee.CardSchema, dashboard=False): return {"data": get_card(metric_id=r["metric_id"], project_id=project_id, user_id=user_id)} -def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema): +def update_card(metric_id, user_id, project_id, data: schemas.CardSchema): metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if metric is None: return None @@ -344,7 +428,7 @@ def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema): "user_id": user_id, "project_id": project_id, "view_type": data.view_type, "metric_type": data.metric_type, "metric_of": data.metric_of, "metric_value": data.metric_value, "metric_format": data.metric_format, - "config": json.dumps(data.default_config.dict()), "thumbnail": data.thumbnail} + "config": json.dumps(data.default_config.model_dump()), "thumbnail": data.thumbnail} for i, s in enumerate(data.series): prefix = "u_" if s.index is None: @@ -355,7 +439,7 @@ def update(metric_id, user_id, project_id, data: schemas_ee.UpdateCardSchema): else: u_series.append({"i": i, "s": s}) u_series_ids.append(s.series_id) - ns = s.dict() + ns = s.model_dump() for k in ns.keys(): if 
k == "filter": ns[k] = json.dumps(ns[k]) @@ -477,7 +561,7 @@ def get_all(project_id, user_id): return result -def delete(project_id, metric_id, user_id): +def delete_card(project_id, metric_id, user_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify("""\ @@ -485,8 +569,7 @@ def delete(project_id, metric_id, user_id): SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) WHERE project_id = %(project_id)s AND metric_id = %(metric_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING data;""", + AND (user_id = %(user_id)s OR is_public);""", {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) ) # for EE only @@ -596,13 +679,13 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, flatten=False) if metric is None: return None - metric: schemas_ee.CardSchema = schemas.CardSchema(**metric) - metric: schemas_ee.CardSchema = __merge_metric_with_data(metric=metric, data=data) + metric: schemas.CardSchema = schemas.CardSchema(**metric) + metric: schemas.CardSchema = __merge_metric_with_data(metric=metric, data=data) if metric is None: return None for s in metric.series: - s.filter.startDate = data.startTimestamp - s.filter.endDate = data.endTimestamp + s.filter.startTimestamp = data.startTimestamp + s.filter.endTimestamp = data.endTimestamp s.filter.limit = data.limit s.filter.page = data.page issues_list = funnels.get_issues_on_the_fly_widget(project_id=project_id, data=s.filter).get("issues", {}) @@ -628,13 +711,15 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, "issue": issue} -def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChartSchema): +def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessionsSchema): raw_metric: dict = get_card(metric_id=metric_id, project_id=project_id, user_id=user_id, 
include_data=True) if raw_metric is None: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="card not found") - metric: schemas_ee.CardSchema = schemas_ee.CardSchema(**raw_metric) + raw_metric["startTimestamp"] = data.startTimestamp + raw_metric["endTimestamp"] = data.endTimestamp + metric: schemas.CardSchema = schemas.CardSchema(**raw_metric) if metric.is_template: - return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.dict()) + return get_predefined_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump()) elif __is_click_map(metric): if raw_metric["data"]: keys = sessions_mobs. \ @@ -654,53 +739,52 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardChart return make_chart(project_id=project_id, user_id=user_id, data=data, metric=metric) -PREDEFINED = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions, - schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, - schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time, - schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time, - schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start, - schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel, - schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages, - schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration, - schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time, - schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time, - schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time, - schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint, 
- schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded, - schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit, - schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive, - schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests, - schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render, - schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption, - schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu, - schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps, - schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors, - schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx, - schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx, - schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains, - schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors, - schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type, - schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party, - schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location, - schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains, - schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser, - schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render, - schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages, - schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption, - schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu, - schemas.MetricOfPerformance.fps: metrics.get_avg_fps, - schemas.MetricOfPerformance.crashes: metrics.get_crashes, - schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete, - 
schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time, - schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time, - schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution, - schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend, - schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources, - schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time, - schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, - schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } - - def get_predefined_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \ schemas.MetricOfPerformance, schemas.MetricOfResources], project_id: int, data: dict): - return PREDEFINED.get(key, lambda *args: None)(project_id=project_id, **data) + supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions, + schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time, + schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time, + schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time, + schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start, + schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel, + schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages, + schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration, + schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time, + 
schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time, + schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint, + schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded, + schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit, + schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive, + schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests, + schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render, + schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption, + schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu, + schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps, + schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors, + schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx, + schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx, + schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains, + schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors, + schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type, + schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party, + schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location, + schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains, + schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser, + schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render, + schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages, + schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption, + schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu, + schemas.MetricOfPerformance.fps: metrics.get_avg_fps, + 
schemas.MetricOfPerformance.crashes: metrics.get_crashes, + schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete, + schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time, + schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time, + schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution, + schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend, + schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources, + schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time, + schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end, + schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type, } + + return supported.get(key, lambda *args: None)(project_id=project_id, **data) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index c191c69cf..3edfa929e 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -461,10 +461,10 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): pg_sub_query_chart.append("errors.error_id =details.error_id") statuses = [] error_ids = None - if data.startDate is None: - data.startDate = TimeUTC.now(-30) - if data.endDate is None: - data.endDate = TimeUTC.now(1) + if data.startTimestamp is None: + data.startTimestamp = TimeUTC.now(-30) + if data.endTimestamp is None: + data.endTimestamp = TimeUTC.now(1) if len(data.events) > 0 or len(data.filters) > 0: print("-- searching for sessions before errors") statuses = sessions.search_sessions(data=data, project_id=project_id, user_id=user_id, errors_only=True, @@ -473,18 +473,18 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): return empty_response error_ids = [e["errorId"] for e in statuses] with pg_client.PostgresClient() 
as cur: - step_size = __get_step_size(data.startDate, data.endDate, data.density, factor=1) + step_size = __get_step_size(data.startTimestamp, data.endTimestamp, data.density, factor=1) sort = __get_sort_key('datetime') if data.sort is not None: sort = __get_sort_key(data.sort) - order = schemas.SortOrderType.desc.value + order = schemas.SortOrderType.desc if data.order is not None: - order = data.order.value + order = data.order extra_join = "" params = { - "startDate": data.startDate, - "endDate": data.endDate, + "startDate": data.startTimestamp, + "endDate": data.endTimestamp, "project_id": project_id, "userId": user_id, "step_size": step_size} @@ -716,41 +716,3 @@ def change_state(project_id, user_id, error_id, action): for e in errors: e["status"] = row["status"] return {"data": errors} - - -MAX_RANK = 2 - - -def __status_rank(status): - return { - 'unresolved': MAX_RANK - 2, - 'ignored': MAX_RANK - 1, - 'resolved': MAX_RANK - }.get(status) - - -def stats(project_id, user_id, startTimestamp=TimeUTC.now(delta_days=-7), endTimestamp=TimeUTC.now()): - with pg_client.PostgresClient() as cur: - query = cur.mogrify( - """WITH user_viewed AS (SELECT error_id FROM public.user_viewed_errors WHERE user_id = %(user_id)s) - SELECT COUNT(timed_errors.*) AS unresolved_and_unviewed - FROM (SELECT root_error.error_id - FROM events.errors - INNER JOIN public.errors AS root_error USING (error_id) - LEFT JOIN user_viewed USING (error_id) - WHERE project_id = %(project_id)s - AND timestamp >= %(startTimestamp)s - AND timestamp <= %(endTimestamp)s - AND source = 'js_exception' - AND root_error.status = 'unresolved' - AND user_viewed.error_id ISNULL - LIMIT 1 - ) AS timed_errors;""", - {"project_id": project_id, "user_id": user_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp}) - cur.execute(query=query) - row = cur.fetchone() - - return { - "data": helper.dict_to_camel_case(row) - } diff --git a/ee/api/chalicelib/core/notifications.py 
b/ee/api/chalicelib/core/notifications.py index 7562457a4..fbe399310 100644 --- a/ee/api/chalicelib/core/notifications.py +++ b/ee/api/chalicelib/core/notifications.py @@ -44,7 +44,7 @@ def get_all_count(tenant_id, user_id): def view_notification(user_id, notification_ids=[], tenant_id=None, startTimestamp=None, endTimestamp=None): - if (notification_ids is None or len(notification_ids) == 0) and endTimestamp is None: + if len(notification_ids) == 0 and endTimestamp is None: return False if startTimestamp is None: startTimestamp = 0 diff --git a/ee/api/chalicelib/core/permissions.py b/ee/api/chalicelib/core/permissions.py index bcc30b891..82389d185 100644 --- a/ee/api/chalicelib/core/permissions.py +++ b/ee/api/chalicelib/core/permissions.py @@ -1,9 +1,9 @@ from fastapi.security import SecurityScopes -import schemas_ee +import schemas -def check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext): +def check(security_scopes: SecurityScopes, context: schemas.CurrentContext): for scope in security_scopes.scopes: if scope not in context.permissions: return False diff --git a/ee/api/chalicelib/core/product_analytics.py b/ee/api/chalicelib/core/product_analytics.py index ff98f026c..cc9d6b913 100644 --- a/ee/api/chalicelib/core/product_analytics.py +++ b/ee/api/chalicelib/core/product_analytics.py @@ -1,9 +1,14 @@ +from typing import List + import schemas from chalicelib.core.metrics import __get_basic_constraints, __get_meta_constraint from chalicelib.core.metrics import __get_constraint_values, __complete_missing_steps from chalicelib.utils import ch_client from chalicelib.utils import helper, dev from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.utils import sql_helper as sh +from chalicelib.core import metadata +from time import time def __transform_journey(rows): @@ -20,1016 +25,1275 @@ def __transform_journey(rows): return {"nodes": nodes, "links": sorted(links, key=lambda x: x["value"], reverse=True)} -JOURNEY_DEPTH = 5 -JOURNEY_TYPES 
= { - "PAGES": {"table": "pages", "column": "url_path"}, - "CLICK": {"table": "clicks", "column": "label"}, - # TODO: support input event - "EVENT": {"table": "customs", "column": "name"} -} - - -def path_analysis(project_id, startTimestamp=TimeUTC.now(delta_days=-1), endTimestamp=TimeUTC.now(), filters=[], - **args): - event_start = None - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - meta_condition = [] - # TODO: support multi-value - for f in filters: - if f["type"] == "START_POINT": - event_start = f["value"] - elif f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append(f"sessions_metadata.project_id = %(project_id)s") - meta_condition.append(f"sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") - meta_condition.append(f"sessions_metadata.datetime < toDateTime(%(endTimestamp)s / 1000)") - extra_values["user_id"] = f["value"] - ch_sub_query = __get_basic_constraints(table_name=event_table, data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT source_event, - target_event, - count(*) AS value - FROM (SELECT toString(event_number) || '_' || value AS target_event, - lagInFrame(toString(event_number) || '_' || value) OVER (PARTITION BY session_rank ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_event - FROM (SELECT session_rank, - datetime, - value, - row_number AS event_number - FROM (SELECT session_rank, - groupArray(datetime) AS arr_datetime, - groupArray(value) AS arr_value, - arrayEnumerate(arr_datetime) AS row_number - {f"FROM (SELECT * FROM 
(SELECT *, MIN(mark) OVER ( PARTITION BY session_id , session_rank ORDER BY datetime ) AS max FROM (SELECT *, CASE WHEN value = %(event_start)s THEN datetime ELSE NULL END as mark" if event_start else ""} - FROM (SELECT session_id, - datetime, - value, - SUM(new_session) OVER (ORDER BY session_id, datetime) AS session_rank - FROM (SELECT *, - if(equals(source_timestamp, '1970-01-01'), 1, 0) AS new_session - FROM (SELECT session_id, - datetime, - {event_column} AS value, - lagInFrame(datetime) OVER (PARTITION BY session_id ORDER BY datetime ASC ROWS - BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS source_timestamp - FROM {event_table} {"INNER JOIN sessions_metadata USING(session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - ORDER BY session_id, datetime) AS related_events) AS ranked_events - ORDER BY session_rank, datetime - ) AS processed - {") AS marked) AS maxed WHERE datetime >= max) AS filtered" if event_start else ""} - GROUP BY session_rank - ORDER BY session_rank) - ARRAY JOIN - arr_datetime AS datetime, - arr_value AS value, - row_number - ORDER BY session_rank ASC, - row_number ASC) AS sorted_events - WHERE event_number <= %(JOURNEY_DEPTH)s) AS final - WHERE not empty(source_event) - AND not empty(target_event) - GROUP BY source_event, target_event - ORDER BY value DESC - LIMIT 20;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, "event_start": event_start, "JOURNEY_DEPTH": JOURNEY_DEPTH, - **__get_constraint_values(args), **extra_values} - - rows = ch.execute(query=ch_query, params=params) - # print(ch_query % params) - return __transform_journey(rows) - - -def __compute_weekly_percentage(rows): - if rows is None or len(rows) == 0: - return rows - t = -1 +def __transform_journey2(rows, reverse_path=False): + # nodes should contain duplicates for different steps otherwise the UI crashes + nodes = [] + nodes_values = [] + links = [] for r in rows: - if 
r["week"] == 0: - t = r["usersCount"] - r["percentage"] = r["usersCount"] / t - return rows - - -def __complete_retention(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if i < len(rows) \ - and i != rows[i]["week"]: - rows.insert(i, neutral) - elif i >= len(rows): - rows.append(neutral) - return rows - - -def __complete_acquisition(rows, start_date, end_date=None): - if rows is None: - return [] - max_week = 10 - week = 0 - delta_date = 0 - while max_week > 0: - start_date += TimeUTC.MS_WEEK - if end_date is not None and start_date >= end_date: - break - delta = 0 - if delta_date + week >= len(rows) \ - or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - rows.insert(delta_date + week + i, neutral) - delta = i - else: - for i in range(max_week): - if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: - break - - neutral = { - "firstConnexionWeek": start_date, - "week": i, - "usersCount": 0, - "connectedUsers": [], - "percentage": 0 - } - if delta_date + week + i < len(rows) \ - and i != rows[delta_date + week + i]["week"]: - rows.insert(delta_date + week + i, neutral) - elif delta_date + week + i >= len(rows): - rows.append(neutral) - delta = i - week += delta - max_week -= 1 - delta_date += 1 - return rows - - -def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - 
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - COUNT(all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - ) AS all_connexions USING (user_id) - GROUP BY connexion_week - ORDER BY connexion_week;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * 
TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, - week, - users_count, - connected_users - FROM ( - SELECT first_connexion_week, - toInt8((connexion_week - first_connexion_week) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(20)(all_connexions.user_id) AS connected_users - FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - LIMIT 1)) - GROUP BY user_id) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - WHERE first_connexion_week <= connexion_week - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week - ) AS full_data;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": 
startTimestamp, - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) - row = 
ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] + source = f"{r['event_number_in_session']}_{r['event_type']}_{r['e_value']}" + if source not in nodes: + nodes.append(source) + # TODO: remove this after UI supports long values + nodes_values.append({"name": r['e_value'][:10], "eventType": r['event_type']}) + if r['next_value']: + target = f"{r['event_number_in_session'] + 1}_{r['next_type']}_{r['next_value']}" + if target not in nodes: + nodes.append(target) + # TODO: remove this after UI supports long values + nodes_values.append({"name": r['next_value'][:10], "eventType": r['next_type']}) + link = {"eventType": r['event_type'], "value": r["sessions_count"], + "avgTimeToTarget": r["avg_time_to_target"]} + if not reverse_path: + link["source"] = nodes.index(source) + link["target"] = nodes.index(target) else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query += meta_condition - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT DISTINCT user_id - FROM {event_table} AS feature INNER JOIN sessions_metadata 
USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - AND bsess.{event_column}=%(value)s - LIMIT 1)) - ) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - GROUP BY connexion_week - ORDER BY connexion_week;""" + link["source"] = nodes.index(target) + link["target"] = nodes.index(source) + links.append(link) - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } + return {"nodes": nodes_values, + "links": sorted(links, key=lambda x: x["value"], reverse=True)} -def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - 
endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query% params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_acquisition(rows=[], 
start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - ch_sub_query += meta_condition - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, - week, - users_count, - connected_users - FROM ( - SELECT first_connexion_week, - toInt8((connexion_week - first_connexion_week) / 7) AS week, - COUNT(DISTINCT all_connexions.user_id) AS users_count, - groupArray(100)(all_connexions.user_id) AS connected_users - FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) - AND isNull((SELECT 1 - FROM sessions_metadata AS bmsess - INNER JOIN {event_table} AS bsess USING (session_id) - WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) - AND bsess.project_id = %(project_id)s - AND bmsess.project_id = %(project_id)s - AND bmsess.user_id = sessions_metadata.user_id - AND bsess.{event_column} = %(value)s - LIMIT 1)) - GROUP BY user_id) AS users_list - INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week - FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) - WHERE {" AND 
".join(ch_sub_query)} - ORDER BY connexion_week, user_id - ) AS all_connexions USING (user_id) - WHERE first_connexion_week <= connexion_week - GROUP BY first_connexion_week, week - ORDER BY first_connexion_week, week - ) AS full_data;""" - - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) - } - - -def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - startTimestamp = TimeUTC.trunc_week(startTimestamp) - endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition = __get_meta_constraint(args) - - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < 
toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - - with ch_client.ClickHouseClient() as ch: - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query += meta_condition - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(meta_condition)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - all_user_count = ch.execute(ch_query, params) - if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: - return [] - all_user_count = all_user_count[0]["count"] - ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND length({event_column})>2 - GROUP BY value - ORDER BY count DESC - LIMIT 7;""" - - # print(ch_query % params) - # print("---------------------") - popularity = ch.execute(ch_query, params) - params["values"] = [p["value"] for p in popularity] - if len(params["values"]) == 0: - return [] - ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - AND {event_column} IN %(values)s - GROUP BY value;""" - - # print(ch_query % params) - # print("---------------------") - frequencies = ch.execute(ch_query, params) - total_usage = sum([f["count"] for f in frequencies]) - 
frequencies = {f["value"]: f["count"] for f in frequencies} - for p in popularity: - p["popularity"] = p.pop("count") / all_user_count - p["frequency"] = frequencies[p["value"]] / total_usage - - return popularity - - -def feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # 
print(ch_query % params) - # print("---------------------") - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - # else: - # print(f"no {event_table} most used value") - # return {"target": 0, "adoption": 0, - # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} - - extra_values["value"] = event_value - - if len(meta_condition) == 0: - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - meta_condition.append("sessions_metadata.user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - ch_sub_query += meta_condition - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(meta_condition)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - all_user_count = ch.execute(ch_query, params) - if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: - return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": event_value}], } - all_user_count = all_user_count[0]["count"] - - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)};""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - # print("---------------------") - adoption = ch.execute(ch_query, params) - adoption = 
adoption[0]["count"] / all_user_count - return {"target": all_user_count, "adoption": adoption, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("user_id IS NOT NULL") - meta_condition.append("not empty(sessions_metadata.user_id)") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - ch_sub_query = __get_basic_constraints(table_name='feature', data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - row = ch.execute(ch_query, 
params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return {"users": [], - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} - - extra_values["value"] = event_value - if len(meta_condition) == 0: - ch_sub_query.append("user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id - ORDER BY count DESC - LIMIT 10;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - return {"users": helper.list_to_camel_case(rows), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), - filters=[], **args): - event_type = "CLICK" - event_value = '/' - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = 
%(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - AND length({event_column}) > 2 - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) - } - extra_values["value"] = event_value - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count - FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count - FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY day - ORDER BY day) AS raw_results;""" - params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, 
"startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, - density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, - neutral={"count": 0}), - "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} - - -def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - event_table = JOURNEY_TYPES["CLICK"]["table"] - event_column = JOURNEY_TYPES["CLICK"]["column"] - extra_values = {} - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_table = JOURNEY_TYPES[f["value"]]["table"] - event_column = JOURNEY_TYPES[f["value"]]["column"] - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY avg DESC - LIMIT 7;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, 
**__get_constraint_values(args), **extra_values} - # print(ch_query % params) - rows = ch.execute(ch_query, params) - - return rows - - -PERIOD_TO_FUNCTION = { - "DAY": "toStartOfDay", - "WEEK": "toStartOfWeek" +JOURNEY_TYPES = { + schemas.ProductAnalyticsSelectedEventType.location: {"eventType": "LOCATION", "column": "url_path"}, + schemas.ProductAnalyticsSelectedEventType.click: {"eventType": "CLICK", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.input: {"eventType": "INPUT", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.custom_event: {"eventType": "CUSTOM", "column": "name"} } -def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - meta_condition = __get_meta_constraint(args) - period = "DAY" - extra_values = {} - for f in filters: - if f["type"] == "PERIOD" and f["value"] in ["DAY", "WEEK"]: - period = f["value"] - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - extra_values["user_id"] = f["value"] - period_function = PERIOD_TO_FUNCTION[period] - ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg - FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY period) AS daily_users;""" - params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, - "project_id": project_id, - "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( - startTimestamp), 
"endTimestamp": endTimestamp, **__get_constraint_values(args), - **extra_values} - # print(ch_query % params) - # print("---------------------") - avg = ch.execute(ch_query, params) - if len(avg) == 0 or avg[0]["avg"] == 0: - return {"avg": 0, "chart": []} - avg = avg[0]["avg"] - # TODO: optimize this when DB structure changes, optimization from 3s to 1s - ch_query = f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count - FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY period - ORDER BY period) AS raw_results;""" - # print(ch_query % params) - # print("---------------------") - rows = ch.execute(ch_query, params) - return {"avg": avg, "chart": rows} - - -def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): - ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg - FROM(SELECT COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days) AS results;""" - params = {"project_id": project_id, - "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} - # print(ch_query % params) - # print("---------------------") - avg = ch.execute(ch_query, params) - if len(avg) == 0 or avg[0]["avg"] == 0: - return {"avg": 0, "partition": []} - avg = avg[0]["avg"] - ch_query = f"""SELECT number_of_days, 
COUNT(user_id) AS count - FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days - FROM sessions_metadata - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id) AS users_connexions - GROUP BY number_of_days - ORDER BY number_of_days;""" - - # print(ch_query % params) - # print("---------------------") - rows = ch.execute(ch_query, params) - - return {"avg": avg, "partition": helper.list_to_camel_case(rows)} - - -def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], - **args): - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - event_type = "PAGES" - event_value = "/" - extra_values = {} - default = True - meta_condition = [] - for f in filters: - if f["type"] == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): - event_type = f["value"] - elif f["type"] == "EVENT_VALUE": - event_value = f["value"] - default = False - elif f["type"] in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: - meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") - meta_condition.append("sessions_metadata.project_id = %(project_id)s") - meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - extra_values["user_id"] = f["value"] - event_table = JOURNEY_TYPES[event_type]["table"] - event_column = JOURNEY_TYPES[event_type]["column"] - - meta_condition += __get_meta_constraint(args) - ch_sub_query += meta_condition - with ch_client.ClickHouseClient() as ch: - if default: - # get most used value - ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count - FROM {event_table} AS feature - {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} - WHERE {" AND ".join(ch_sub_query)} - GROUP BY value - ORDER BY count DESC - LIMIT 1;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, 
- "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - row = ch.execute(ch_query, params) - if len(row) > 0: - event_value = row[0]["value"] - else: - print(f"no {event_table} most used value") - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", "value": event_type}, - {"type": "EVENT_VALUE", "value": ""}], - "list": [] - } - extra_values["value"] = event_value - if len(meta_condition) == 0: - ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") - ch_sub_query.append("not empty(sessions_metadata.user_id)") - ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") - ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") - ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") - ch_sub_query.append(f"feature.{event_column} = %(value)s") - ch_query = f"""SELECT user_id, - toUnixTimestamp(last_time)*1000 AS last_time, - interactions_count, - toUnixTimestamp(first_seen) * 1000 AS first_seen, - toUnixTimestamp(last_seen) * 1000 AS last_seen - FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen - FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count - FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) - WHERE {" AND ".join(ch_sub_query)} - GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) - WHERE now() - last_time > 7 - GROUP BY user_id, last_time, interactions_count - ORDER BY interactions_count DESC, last_time DESC - LIMIT 50) AS raw_results;""" - params = {"project_id": project_id, "startTimestamp": startTimestamp, - "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} - print(ch_query % params) - rows = ch.execute(ch_query, params) - return { - "startTimestamp": startTimestamp, - "filters": [{"type": "EVENT_TYPE", 
"value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], - "list": helper.list_to_camel_case(rows) - } - - -def search(text, feature_type, project_id, platform=None): - if not feature_type: - resource_type = "ALL" - data = search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) - return data - args = {} if platform is None else {"platform": platform} - ch_sub_query = __get_basic_constraints(table_name="feature", data=args) - meta_condition = __get_meta_constraint(args) - ch_sub_query += meta_condition - params = {"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, - "endTimestamp": TimeUTC.now(), - "project_id": project_id, - "value": text.lower(), - "platform_0": platform} - if feature_type == "ALL": - with ch_client.ClickHouseClient() as ch: - sub_queries = [] - for e in JOURNEY_TYPES: - sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" - FROM {JOURNEY_TYPES[e]["table"]} AS feature - WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 - LIMIT 10)""") - ch_query = "UNION ALL".join(sub_queries) - print(ch_query % params) - rows = ch.execute(ch_query, params) - elif JOURNEY_TYPES.get(feature_type) is not None: - with ch_client.ClickHouseClient() as ch: - ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" - FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature - WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 - LIMIT 10;""" - print(ch_query % params) - rows = ch.execute(ch_query, params) +def path_analysis(project_id: int, data: schemas.PathAnalysisSchema, + selected_event_type: List[schemas.ProductAnalyticsSelectedEventType], + density: int = 4, hide_minor_paths: bool = False): + sub_events = [] + start_points_conditions = [] + if len(selected_event_type) == 0: + 
selected_event_type.append(schemas.ProductAnalyticsSelectedEventType.location) + sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"], + "eventType": schemas.ProductAnalyticsSelectedEventType.location.value}) else: - return [] - return [helper.dict_to_camel_case(row) for row in rows] + for v in selected_event_type: + if JOURNEY_TYPES.get(v): + sub_events.append({"column": JOURNEY_TYPES[v]["column"], + "eventType": JOURNEY_TYPES[v]["eventType"]}) + if len(sub_events) == 1: + main_column = sub_events[0]['column'] + else: + main_column = f"multiIf(%s,%s)" % ( + ','.join([f"event_type='{s['eventType']}',{s['column']}" for s in sub_events[:-1]]), + sub_events[-1]["column"]) + extra_values = {} + sessions_conditions = [] + reverse = False + meta_keys = None + exclusions = {} + for i, f in enumerate(data.filters): + op = sh.get_sql_operator(f.operator) + is_any = sh.isAny_opreator(f.operator) + is_not = sh.is_negation_operator(f.operator) + is_undefined = sh.isUndefined_operator(f.operator) + f_k = f"f_value_{i}" + extra_values = {**extra_values, **sh.multi_values(f.value, value_key=f_k)} + + if f.type in [schemas.ProductAnalyticsFilterType.start_point, schemas.ProductAnalyticsFilterType.end_point]: + for sf in f.filters: + extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)} + start_points_conditions.append(f"(event_type='{JOURNEY_TYPES[sf.type]['eventType']}' AND " + + sh.multi_conditions(f'e_value {op} %({f_k})s', sf.value, is_not=is_not, + value_key=f_k) + + ")") + + reverse = f.type == schemas.ProductAnalyticsFilterType.end_point + elif f.type == schemas.ProductAnalyticsFilterType.exclude: + for sf in f.filters: + if sf.type in selected_event_type: + extra_values = {**extra_values, **sh.multi_values(sf.value, value_key=f_k)} + exclusions[sf.type] = [ + sh.multi_conditions(f'{JOURNEY_TYPES[sf.type]["column"]} != %({f_k})s', sf.value, is_not=True, + value_key=f_k)] + + # ---- meta-filters + if 
f.type == schemas.FilterType.user_browser: + if is_any: + sessions_conditions.append('isNotNull(user_browser)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_os]: + if is_any: + sessions_conditions.append('isNotNull(user_os)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_device]: + if is_any: + sessions_conditions.append('isNotNull(user_device)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_country]: + if is_any: + sessions_conditions.append('isNotNull(user_country)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.user_city: + if is_any: + sessions_conditions.append('isNotNull(user_city)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.user_state: + if is_any: + sessions_conditions.append('isNotNull(user_state)') + else: + sessions_conditions.append( + sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_source]: + if is_any: + sessions_conditions.append('isNotNull(utm_source)') + elif is_undefined: + sessions_conditions.append('isNull(utm_source)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_medium]: + if is_any: + sessions_conditions.append('isNotNull(utm_medium)') + elif is_undefined: + 
sessions_conditions.append('isNull(utm_medium)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.utm_campaign]: + if is_any: + sessions_conditions.append('isNotNull(utm_campaign)') + elif is_undefined: + sessions_conditions.append('isNull(utm_campaign)') + else: + sessions_conditions.append( + sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.duration: + if len(f.value) > 0 and f.value[0] is not None: + sessions_conditions.append("duration >= %(minDuration)s") + extra_values["minDuration"] = f.value[0] + if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: + sessions_conditions.append("duration <= %(maxDuration)s") + extra_values["maxDuration"] = f.value[1] + elif f.type == schemas.FilterType.referrer: + # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" + if is_any: + sessions_conditions.append('isNotNull(base_referrer)') + else: + sessions_conditions.append( + sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + elif f.type == schemas.FilterType.metadata: + # get metadata list only if you need it + if meta_keys is None: + meta_keys = metadata.get(project_id=project_id) + meta_keys = {m["key"]: m["index"] for m in meta_keys} + if f.source in meta_keys.keys(): + if is_any: + sessions_conditions.append(f"isNotNull({metadata.index_to_colname(meta_keys[f.source])})") + elif is_undefined: + sessions_conditions.append(f"isNull({metadata.index_to_colname(meta_keys[f.source])})") + else: + sessions_conditions.append( + sh.multi_conditions( + f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", + f.value, is_not=is_not, value_key=f_k)) + + elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: + if is_any: + 
sessions_conditions.append('isNotNull(user_id)') + elif is_undefined: + sessions_conditions.append('isNull(user_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"s.user_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.user_anonymous_id, + schemas.FilterType.user_anonymous_id_ios]: + if is_any: + sessions_conditions.append('isNotNull(user_anonymous_id)') + elif is_undefined: + sessions_conditions.append('isNull(user_anonymous_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_ios]: + if is_any: + sessions_conditions.append('isNotNull(rev_id)') + elif is_undefined: + sessions_conditions.append('isNull(rev_id)') + else: + sessions_conditions.append( + sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) + + elif f.type == schemas.FilterType.platform: + # op = __ sh.get_sql_operator(f.operator) + sessions_conditions.append( + sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.issue: + if is_any: + sessions_conditions.append("array_length(issue_types, 1) > 0") + else: + sessions_conditions.append( + sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not, + value_key=f_k)) + + elif f.type == schemas.FilterType.events_count: + sessions_conditions.append( + sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not, + value_key=f_k)) + + # ch_sub_query = __get_basic_constraints(table_name="experimental.events", data=data.model_dump()) + ch_sub_query = __get_basic_constraints(table_name="events") + selected_event_type_sub_query = [] + for s in selected_event_type: + selected_event_type_sub_query.append(f"events.event_type = '{JOURNEY_TYPES[s]['eventType']}'") + if s 
in exclusions: + selected_event_type_sub_query[-1] += " AND (" + " AND ".join(exclusions[s]) + ")" + selected_event_type_sub_query = " OR ".join(selected_event_type_sub_query) + ch_sub_query.append(f"({selected_event_type_sub_query})") + + main_table = "experimental.events" + if len(sessions_conditions) > 0: + sessions_conditions.append(f"sessions.project_id = %(project_id)s") + sessions_conditions.append(f"sessions.datetime >= toDateTime(%(startTimestamp)s / 1000)") + sessions_conditions.append(f"sessions.datetime < toDateTime(%(endTimestamp)s / 1000)") + sessions_conditions.append("sessions.events_count>1") + sessions_conditions.append("sessions.duration>0") + main_table = f"""(SELECT DISTINCT session_id + FROM sessions + WHERE {" AND ".join(sessions_conditions)}) AS sub_sessions + INNER JOIN events USING (session_id)""" + if len(start_points_conditions) == 0: + start_points_subquery = """SELECT DISTINCT session_id + FROM (SELECT event_type, e_value + FROM full_ranked_events + WHERE event_number_in_session = 1 + GROUP BY event_type, e_value + ORDER BY count(1) DESC + LIMIT 2) AS top_start_events + INNER JOIN full_ranked_events + ON (top_start_events.event_type = full_ranked_events.event_type AND + top_start_events.e_value = full_ranked_events.e_value AND + full_ranked_events.event_number_in_session = 1 AND + isNotNull(next_value))""" + else: + start_points_conditions = ["(" + " OR ".join(start_points_conditions) + ")", + "event_number_in_session = 1", + "isNotNull(next_value)"] + start_points_subquery = f"""SELECT DISTINCT session_id + FROM full_ranked_events + WHERE {" AND ".join(start_points_conditions)}""" + del start_points_conditions + if reverse: + path_direction = "DESC" + else: + path_direction = "" + + with ch_client.ClickHouseClient(database="experimental") as ch: + ch_query = f"""\ +WITH full_ranked_events AS (SELECT session_id, + event_type, + {main_column} AS e_value, + row_number() OVER (PARTITION BY session_id ORDER BY datetime 
{path_direction},message_id {path_direction}) AS event_number_in_session, + leadInFrame(label) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_value, + leadInFrame(toNullable(event_type)) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) AS next_type, + abs(leadInFrame(toNullable(datetime)) + OVER (PARTITION BY session_id ORDER BY datetime {path_direction},message_id {path_direction} ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) - + events.datetime) AS time_to_next + FROM {main_table} + WHERE {" AND ".join(ch_sub_query)}) +SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + sessions_count, + avg(time_to_next) AS avg_time_to_target +FROM (SELECT * + FROM (SELECT *, + row_number() + OVER (PARTITION BY event_number_in_session, event_type, e_value ORDER BY sessions_count DESC ) AS _event_number_in_group + FROM (SELECT event_number_in_session, + event_type, + e_value, + next_type, + next_value, + time_to_next, + count(1) AS sessions_count + FROM ({start_points_subquery}) AS start_points + INNER JOIN full_ranked_events USING (session_id) + GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, + time_to_next) AS groupped_events) AS ranked_groupped_events + WHERE _event_number_in_group < 9) AS limited_events +GROUP BY event_number_in_session, event_type, e_value, next_type, next_value, sessions_count +ORDER BY event_number_in_session, e_value, next_value;""" + params = {"project_id": project_id, "startTimestamp": data.startTimestamp, + "endTimestamp": data.endTimestamp, + # **__get_constraint_values(args), + **extra_values} + + _now = time() + rows = ch.execute(query=ch_query, params=params) + if time() - _now > 3: + print(f">>>>>>>>>PathAnalysis long query EE ({int(time() - 
_now)}s)<<<<<<<<<") + print("----------------------") + print(print(ch.format(ch_query, params))) + print("----------------------") + return __transform_journey2(rows=rows, reverse_path=reverse) + +# +# def __compute_weekly_percentage(rows): +# if rows is None or len(rows) == 0: +# return rows +# t = -1 +# for r in rows: +# if r["week"] == 0: +# t = r["usersCount"] +# r["percentage"] = r["usersCount"] / t +# return rows +# +# +# def __complete_retention(rows, start_date, end_date=None): +# if rows is None: +# return [] +# max_week = 10 +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# if i < len(rows) \ +# and i != rows[i]["week"]: +# rows.insert(i, neutral) +# elif i >= len(rows): +# rows.append(neutral) +# return rows +# +# +# def __complete_acquisition(rows, start_date, end_date=None): +# if rows is None: +# return [] +# max_week = 10 +# week = 0 +# delta_date = 0 +# while max_week > 0: +# start_date += TimeUTC.MS_WEEK +# if end_date is not None and start_date >= end_date: +# break +# delta = 0 +# if delta_date + week >= len(rows) \ +# or delta_date + week < len(rows) and rows[delta_date + week]["firstConnexionWeek"] > start_date: +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# rows.insert(delta_date + week + i, neutral) +# delta = i +# else: +# for i in range(max_week): +# if end_date is not None and start_date + i * TimeUTC.MS_WEEK >= end_date: +# break +# +# neutral = { +# "firstConnexionWeek": start_date, +# "week": i, +# "usersCount": 0, +# "connectedUsers": [], +# "percentage": 0 +# } +# if delta_date + week + i < len(rows) \ +# and i != rows[delta_date 
+ week + i]["week"]: +# rows.insert(delta_date + week + i, neutral) +# elif delta_date + week + i >= len(rows): +# rows.append(neutral) +# delta = i +# week += delta +# max_week -= 1 +# delta_date += 1 +# return rows +# +# +# def users_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, +# COUNT(all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT DISTINCT user_id +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# AND toStartOfWeek(sessions_metadata.datetime,1) = toDate(%(startTimestamp)s / 1000) +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# LIMIT 1)) +# ) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# ) AS all_connexions USING (user_id) +# GROUP BY connexion_week +# ORDER BY connexion_week;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows 
= __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def users_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='sessions_metadata', data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s / 1000)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, +# week, +# users_count, +# connected_users +# FROM ( +# SELECT first_connexion_week, +# toInt8((connexion_week - first_connexion_week) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(20)(all_connexions.user_id) AS connected_users +# FROM (SELECT user_id, MIN(toStartOfWeek(sessions_metadata.datetime, 1)) AS first_connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# WHERE bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# LIMIT 1)) +# GROUP BY user_id) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions 
USING (user_id) +# WHERE first_connexion_week <= connexion_week +# GROUP BY first_connexion_week, week +# ORDER BY first_connexion_week, week +# ) AS full_data;""" +# +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_retention(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# event_type = "PAGES" +# event_value = "/" +# extra_values = {} +# default = True +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# with ch_client.ClickHouseClient() as ch: +# if default: +# # 
get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query% params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_retention(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query += meta_condition +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toInt8((connexion_week - toDate(%(startTimestamp)s / 1000)) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT DISTINCT user_id +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND toStartOfWeek(feature.datetime,1) = toDate(%(startTimestamp)s / 1000) +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND 
feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM {event_table} AS bsess INNER JOIN sessions_metadata AS bmsess USING (session_id) +# WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bsess.project_id = %(project_id)s +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# AND bsess.{event_column}=%(value)s +# LIMIT 1)) +# ) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime,1) AS connexion_week +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions USING (user_id) +# GROUP BY connexion_week +# ORDER BY connexion_week;""" +# +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "chart": __complete_retention(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_acquisition(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# +# event_type = "PAGES" +# event_value = "/" +# extra_values = {} +# default = True +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == 
"EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query% params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# 
meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# ch_sub_query += meta_condition +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toUnixTimestamp(toDateTime(first_connexion_week))*1000 AS first_connexion_week, +# week, +# users_count, +# connected_users +# FROM ( +# SELECT first_connexion_week, +# toInt8((connexion_week - first_connexion_week) / 7) AS week, +# COUNT(DISTINCT all_connexions.user_id) AS users_count, +# groupArray(100)(all_connexions.user_id) AS connected_users +# FROM (SELECT user_id, MIN(toStartOfWeek(feature.datetime, 1)) AS first_connexion_week +# FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND sessions_metadata.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND feature.datetime < toDateTime(%(startTimestamp)s/1000 + 8 * 24 * 60 * 60 ) +# AND isNull((SELECT 1 +# FROM sessions_metadata AS bmsess +# INNER JOIN {event_table} AS bsess USING (session_id) +# WHERE bsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bmsess.datetime < toDateTime(%(startTimestamp)s / 1000) +# AND bsess.project_id = %(project_id)s +# AND bmsess.project_id = %(project_id)s +# AND bmsess.user_id = sessions_metadata.user_id +# AND bsess.{event_column} = %(value)s +# LIMIT 1)) +# GROUP BY user_id) AS users_list +# INNER JOIN (SELECT DISTINCT user_id, toStartOfWeek(datetime, 1) AS connexion_week +# FROM sessions_metadata INNER JOIN {event_table} AS feature USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# ORDER BY connexion_week, user_id +# ) AS all_connexions USING (user_id) +# WHERE first_connexion_week <= connexion_week +# GROUP BY first_connexion_week, week +# ORDER BY first_connexion_week, week +# ) AS full_data;""" +# +# params = {"project_id": project_id, "startTimestamp": 
startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# rows = __compute_weekly_percentage(helper.list_to_camel_case(rows)) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "chart": __complete_acquisition(rows=rows, start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# +# +# def feature_popularity_frequency(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# startTimestamp = TimeUTC.trunc_week(startTimestamp) +# endTimestamp = startTimestamp + 10 * TimeUTC.MS_WEEK +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition = __get_meta_constraint(args) +# +# event_table = JOURNEY_TYPES["CLICK"]["table"] +# event_column = JOURNEY_TYPES["CLICK"]["column"] +# extra_values = {} +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_table = JOURNEY_TYPES[f["value"]]["table"] +# event_column = JOURNEY_TYPES[f["value"]]["column"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# +# with ch_client.ClickHouseClient() as ch: +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# 
meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query += meta_condition +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(meta_condition)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# all_user_count = ch.execute(ch_query, params) +# if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: +# return [] +# all_user_count = all_user_count[0]["count"] +# ch_query = f"""SELECT {event_column} AS value, COUNT(DISTINCT user_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND length({event_column})>2 +# GROUP BY value +# ORDER BY count DESC +# LIMIT 7;""" +# +# # print(ch_query % params) +# # print("---------------------") +# popularity = ch.execute(ch_query, params) +# params["values"] = [p["value"] for p in popularity] +# if len(params["values"]) == 0: +# return [] +# ch_query = f"""SELECT {event_column} AS value, COUNT(session_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# AND {event_column} IN %(values)s +# GROUP BY value;""" +# +# # print(ch_query % params) +# # print("---------------------") +# frequencies = ch.execute(ch_query, params) +# total_usage = sum([f["count"] for f in frequencies]) +# frequencies = {f["value"]: f["count"] for f in frequencies} +# for p in popularity: +# p["popularity"] = p.pop("count") / all_user_count +# p["frequency"] = frequencies[p["value"]] / total_usage +# +# return popularity +# +# +# def 
feature_adoption(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# # else: +# # print(f"no {event_table} most 
used value") +# # return {"target": 0, "adoption": 0, +# # "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} +# +# extra_values["value"] = event_value +# +# if len(meta_condition) == 0: +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# ch_sub_query += meta_condition +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(meta_condition)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# all_user_count = ch.execute(ch_query, params) +# if len(all_user_count) == 0 or all_user_count[0]["count"] == 0: +# return {"adoption": 0, "target": 0, "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": event_value}], } +# all_user_count = all_user_count[0]["count"] +# +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT COUNT(DISTINCT user_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)};""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# adoption = ch.execute(ch_query, params) +# adoption = adoption[0]["count"] / all_user_count +# return {"target": all_user_count, "adoption": adoption, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, 
{"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_adoption_top_users(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("user_id IS NOT NULL") +# meta_condition.append("not empty(sessions_metadata.user_id)") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# ch_sub_query = __get_basic_constraints(table_name='feature', data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# 
return {"users": [], +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": ""}]} +# +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# ch_sub_query.append("user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT user_id, COUNT(DISTINCT session_id) AS count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id +# ORDER BY count DESC +# LIMIT 10;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return {"users": helper.list_to_camel_case(rows), +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_adoption_daily_usage(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), +# filters=[], **args): +# event_type = "CLICK" +# event_value = '/' +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= 
toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# AND length({event_column}) > 2 +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "chart": __complete_acquisition(rows=[], start_date=startTimestamp, end_date=TimeUTC.now()) +# } +# extra_values["value"] = event_value +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT toUnixTimestamp(day)*1000 AS timestamp, count +# FROM (SELECT toStartOfDay(feature.datetime) AS day, COUNT(DISTINCT session_id) AS count +# FROM {event_table} AS feature {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY day +# ORDER BY day) AS raw_results;""" +# params = {"step_size": TimeUTC.MS_DAY, "project_id": project_id, "startTimestamp": startTimestamp, +# 
"endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return {"chart": __complete_missing_steps(rows=rows, start_time=startTimestamp, end_time=endTimestamp, +# density=(endTimestamp - startTimestamp) // TimeUTC.MS_DAY, +# neutral={"count": 0}), +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}]} +# +# +# def feature_intensity(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# event_table = JOURNEY_TYPES["CLICK"]["table"] +# event_column = JOURNEY_TYPES["CLICK"]["column"] +# extra_values = {} +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_table = JOURNEY_TYPES[f["value"]]["table"] +# event_column = JOURNEY_TYPES[f["value"]]["column"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT {event_column} AS value, AVG(DISTINCT session_id) AS avg +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY avg DESC +# LIMIT 7;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, 
**__get_constraint_values(args), **extra_values} +# # print(ch_query % params) +# rows = ch.execute(ch_query, params) +# +# return rows +# +# +# PERIOD_TO_FUNCTION = { +# "DAY": "toStartOfDay", +# "WEEK": "toStartOfWeek" +# } +# +# +# def users_active(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# meta_condition = __get_meta_constraint(args) +# period = "DAY" +# extra_values = {} +# for f in filters: +# if f.type == "PERIOD" and f["value"] in ["DAY", "WEEK"]: +# period = f["value"] +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# extra_values["user_id"] = f["value"] +# period_function = PERIOD_TO_FUNCTION[period] +# ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT SUM(count) / intDiv(%(endTimestamp)s - %(startTimestamp)s, %(step_size)s) AS avg +# FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY period) AS daily_users;""" +# params = {"step_size": TimeUTC.MS_DAY if period == "DAY" else TimeUTC.MS_WEEK, +# "project_id": project_id, +# "startTimestamp": TimeUTC.trunc_day(startTimestamp) if period == "DAY" else TimeUTC.trunc_week( +# startTimestamp), "endTimestamp": endTimestamp, **__get_constraint_values(args), +# **extra_values} +# # print(ch_query % params) +# # print("---------------------") +# avg = ch.execute(ch_query, params) +# if len(avg) == 0 or avg[0]["avg"] == 0: +# return {"avg": 0, "chart": []} +# avg = avg[0]["avg"] +# # TODO: optimize this when DB structure changes, optimization from 3s to 1s +# ch_query 
= f"""SELECT toUnixTimestamp(toDateTime(period))*1000 AS timestamp, count +# FROM (SELECT {period_function}(sessions_metadata.datetime) AS period, count(DISTINCT user_id) AS count +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY period +# ORDER BY period) AS raw_results;""" +# # print(ch_query % params) +# # print("---------------------") +# rows = ch.execute(ch_query, params) +# return {"avg": avg, "chart": rows} +# +# +# def users_power(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], **args): +# ch_sub_query = __get_basic_constraints(table_name="sessions_metadata", data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT ifNotFinite(AVG(count),0) AS avg +# FROM(SELECT COUNT(user_id) AS count +# FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id) AS users_connexions +# GROUP BY number_of_days +# ORDER BY number_of_days) AS results;""" +# params = {"project_id": project_id, +# "startTimestamp": startTimestamp, "endTimestamp": endTimestamp, **__get_constraint_values(args)} +# # print(ch_query % params) +# # print("---------------------") +# avg = ch.execute(ch_query, params) +# if len(avg) == 0 or avg[0]["avg"] == 0: +# return {"avg": 0, "partition": []} +# avg = avg[0]["avg"] +# ch_query = f"""SELECT number_of_days, COUNT(user_id) AS count +# FROM (SELECT user_id, COUNT(DISTINCT toStartOfDay(datetime)) AS number_of_days +# FROM sessions_metadata +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id) AS users_connexions +# GROUP BY number_of_days +# ORDER BY number_of_days;""" +# +# # print(ch_query % params) +# # print("---------------------") +# 
rows = ch.execute(ch_query, params) +# +# return {"avg": avg, "partition": helper.list_to_camel_case(rows)} +# +# +# def users_slipping(project_id, startTimestamp=TimeUTC.now(delta_days=-70), endTimestamp=TimeUTC.now(), filters=[], +# **args): +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# event_type = "PAGES" +# event_value = "/" +# extra_values = {} +# default = True +# meta_condition = [] +# for f in filters: +# if f.type == "EVENT_TYPE" and JOURNEY_TYPES.get(f["value"]): +# event_type = f["value"] +# elif f.type == "EVENT_VALUE": +# event_value = f["value"] +# default = False +# elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_ios]: +# meta_condition.append(f"sessions_metadata.user_id = %(user_id)s") +# meta_condition.append("sessions_metadata.project_id = %(project_id)s") +# meta_condition.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# meta_condition.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# extra_values["user_id"] = f["value"] +# event_table = JOURNEY_TYPES[event_type]["table"] +# event_column = JOURNEY_TYPES[event_type]["column"] +# +# meta_condition += __get_meta_constraint(args) +# ch_sub_query += meta_condition +# with ch_client.ClickHouseClient() as ch: +# if default: +# # get most used value +# ch_query = f"""SELECT {event_column} AS value, COUNT(*) AS count +# FROM {event_table} AS feature +# {"INNER JOIN sessions_metadata USING (session_id)" if len(meta_condition) > 0 else ""} +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY value +# ORDER BY count DESC +# LIMIT 1;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# row = ch.execute(ch_query, params) +# if len(row) > 0: +# event_value = row[0]["value"] +# else: +# print(f"no {event_table} most used value") +# return { +# "startTimestamp": 
startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, +# {"type": "EVENT_VALUE", "value": ""}], +# "list": [] +# } +# extra_values["value"] = event_value +# if len(meta_condition) == 0: +# ch_sub_query.append("sessions_metadata.user_id IS NOT NULL") +# ch_sub_query.append("not empty(sessions_metadata.user_id)") +# ch_sub_query.append("sessions_metadata.project_id = %(project_id)s") +# ch_sub_query.append("sessions_metadata.datetime >= toDateTime(%(startTimestamp)s/1000)") +# ch_sub_query.append("sessions_metadata.datetime < toDateTime(%(endTimestamp)s/1000)") +# ch_sub_query.append(f"feature.{event_column} = %(value)s") +# ch_query = f"""SELECT user_id, +# toUnixTimestamp(last_time)*1000 AS last_time, +# interactions_count, +# toUnixTimestamp(first_seen) * 1000 AS first_seen, +# toUnixTimestamp(last_seen) * 1000 AS last_seen +# FROM (SELECT user_id, last_time, interactions_count, MIN(datetime) AS first_seen, MAX(datetime) AS last_seen +# FROM (SELECT user_id, MAX(datetime) AS last_time, COUNT(DISTINCT session_id) AS interactions_count +# FROM {event_table} AS feature INNER JOIN sessions_metadata USING (session_id) +# WHERE {" AND ".join(ch_sub_query)} +# GROUP BY user_id ) AS user_last_usage INNER JOIN sessions_metadata USING (user_id) +# WHERE now() - last_time > 7 +# GROUP BY user_id, last_time, interactions_count +# ORDER BY interactions_count DESC, last_time DESC +# LIMIT 50) AS raw_results;""" +# params = {"project_id": project_id, "startTimestamp": startTimestamp, +# "endTimestamp": endTimestamp, **__get_constraint_values(args), **extra_values} +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# return { +# "startTimestamp": startTimestamp, +# "filters": [{"type": "EVENT_TYPE", "value": event_type}, {"type": "EVENT_VALUE", "value": event_value}], +# "list": helper.list_to_camel_case(rows) +# } +# +# +# def search(text, feature_type, project_id, platform=None): +# if not feature_type: +# resource_type = "ALL" +# data = 
search(text=text, feature_type=resource_type, project_id=project_id, platform=platform) +# return data +# args = {} if platform is None else {"platform": platform} +# ch_sub_query = __get_basic_constraints(table_name="feature", data=args) +# meta_condition = __get_meta_constraint(args) +# ch_sub_query += meta_condition +# params = {"startTimestamp": TimeUTC.now() - 1 * TimeUTC.MS_MONTH, +# "endTimestamp": TimeUTC.now(), +# "project_id": project_id, +# "value": text.lower(), +# "platform_0": platform} +# if feature_type == "ALL": +# with ch_client.ClickHouseClient() as ch: +# sub_queries = [] +# for e in JOURNEY_TYPES: +# sub_queries.append(f"""(SELECT DISTINCT {JOURNEY_TYPES[e]["column"]} AS value, '{e}' AS "type" +# FROM {JOURNEY_TYPES[e]["table"]} AS feature +# WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[e]["column"]},%(value)s)!=0 +# LIMIT 10)""") +# ch_query = "UNION ALL".join(sub_queries) +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# elif JOURNEY_TYPES.get(feature_type) is not None: +# with ch_client.ClickHouseClient() as ch: +# ch_query = f"""SELECT DISTINCT {JOURNEY_TYPES[feature_type]["column"]} AS value, '{feature_type}' AS "type" +# FROM {JOURNEY_TYPES[feature_type]["table"]} AS feature +# WHERE {" AND ".join(ch_sub_query)} AND positionUTF8({JOURNEY_TYPES[feature_type]["column"]},%(value)s)!=0 +# LIMIT 10;""" +# print(ch_query % params) +# rows = ch.execute(ch_query, params) +# else: +# return [] +# return [helper.dict_to_camel_case(row) for row in rows] diff --git a/ee/api/chalicelib/core/projects.py b/ee/api/chalicelib/core/projects.py index 57e51227d..4a2c25fd4 100644 --- a/ee/api/chalicelib/core/projects.py +++ b/ee/api/chalicelib/core/projects.py @@ -42,12 +42,12 @@ def __update(tenant_id, project_id, changes): return helper.dict_to_camel_case(cur.fetchone()) -def __create(tenant_id, name): +def __create(tenant_id, data): with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""INSERT INTO 
public.projects (tenant_id, name, active) - VALUES (%(tenant_id)s,%(name)s,TRUE) + query = cur.mogrify(f"""INSERT INTO public.projects (tenant_id, name, platform, active) + VALUES (%(tenant_id)s,%(name)s,%(platform)s,TRUE) RETURNING project_id;""", - {"tenant_id": tenant_id, "name": name}) + {"tenant_id": tenant_id, **data}) cur.execute(query=query) project_id = cur.fetchone()["project_id"] return get_project(tenant_id=tenant_id, project_id=project_id, include_gdpr=True) @@ -79,14 +79,15 @@ def get_projects(tenant_id: int, gdpr: bool = False, recorded: bool = False, use query = cur.mogrify(f"""{"SELECT *, first_recorded IS NOT NULL AS recorded FROM (" if recorded else ""} SELECT s.project_id, s.name, s.project_key, s.save_request_payloads, s.first_recorded_session_at, - created_at, sessions_last_check_at, sample_rate {extra_projection} + s.created_at, s.sessions_last_check_at, s.sample_rate, s.platform + {extra_projection} FROM public.projects AS s {role_query if user_id is not None else ""} WHERE s.tenant_id =%(tenant_id)s AND s.deleted_at IS NULL ORDER BY s.name {") AS raw" if recorded else ""};""", - {"tenant_id": tenant_id, "user_id": user_id, "now": TimeUTC.now(), - "check_delta": TimeUTC.MS_HOUR * 4}) + {"now": TimeUTC.now(), "check_delta": TimeUTC.MS_HOUR * 4, + "tenant_id": tenant_id, "user_id": user_id}) cur.execute(query) rows = cur.fetchall() # if recorded is requested, check if it was saved or computed @@ -145,6 +146,29 @@ def get_project(tenant_id, project_id, include_last_session=False, include_gdpr= return helper.dict_to_camel_case(row) +def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None): + with pg_client.PostgresClient() as cur: + extra_select = "" + if include_last_session: + extra_select += """,(SELECT max(ss.start_ts) + FROM public.sessions AS ss + WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at""" + if include_gdpr: + extra_select += ",s.gdpr" + query = cur.mogrify(f"""SELECT 
s.project_key, + s.name + {extra_select} + FROM public.projects AS s + WHERE s.project_key =%(project_key)s + AND s.tenant_id =%(tenant_id)s + AND s.deleted_at IS NULL + LIMIT 1;""", + {"project_key": project_key, "tenant_id": tenant_id}) + cur.execute(query=query) + row = cur.fetchone() + return helper.dict_to_camel_case(row) + + def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authorization=False): if __exists_by_name(name=data.name, exclude_id=None, tenant_id=tenant_id): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.") @@ -154,7 +178,7 @@ def create(tenant_id, user_id, data: schemas.CreateProjectSchema, skip_authoriza return {"errors": ["unauthorized"]} if admin["roleId"] is not None and not admin["allProjects"]: return {"errors": ["unauthorized: you need allProjects permission to create a new project"]} - return {"data": __create(tenant_id=tenant_id, name=data.name)} + return {"data": __create(tenant_id=tenant_id, data=data.model_dump())} def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): @@ -164,7 +188,7 @@ def edit(tenant_id, user_id, project_id, data: schemas.CreateProjectSchema): if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} return {"data": __update(tenant_id=tenant_id, project_id=project_id, - changes={"name": data.name})} + changes=data.model_dump())} def delete(tenant_id, user_id, project_id): @@ -195,14 +219,14 @@ def get_gdpr(project_id): return row -def edit_gdpr(project_id, gdpr): +def edit_gdpr(project_id, gdpr: schemas.GdprSchema): with pg_client.PostgresClient() as cur: query = cur.mogrify("""UPDATE public.projects SET gdpr = gdpr|| %(gdpr)s WHERE project_id = %(project_id)s AND deleted_at ISNULL RETURNING gdpr;""", - {"project_id": project_id, "gdpr": json.dumps(gdpr)}) + {"project_id": project_id, "gdpr": json.dumps(gdpr.model_dump_json())}) cur.execute(query=query) row = cur.fetchone() if not row: @@ -216,7 +240,7 
@@ def get_internal_project_id(project_key): with pg_client.PostgresClient() as cur: query = cur.mogrify("""SELECT project_id FROM public.projects - WHERE project_key =%(project_key)s + WHERE project_key =%(project_key)s AND deleted_at ISNULL;""", {"project_key": project_key}) cur.execute(query=query) @@ -247,20 +271,14 @@ def get_capture_status(project_id): return helper.dict_to_camel_case(cur.fetchone()) -def update_capture_status(project_id, changes): - if "rate" not in changes and "captureAll" not in changes: - return {"errors": ["please provide 'rate' and/or 'captureAll' attributes to update."]} - if int(changes["rate"]) < 0 or int(changes["rate"]) > 100: - return {"errors": ["'rate' must be between 0..100."]} - sample_rate = 0 - if "rate" in changes: - sample_rate = int(changes["rate"]) - if changes.get("captureAll"): +def update_capture_status(project_id, changes: schemas.SampleRateSchema): + sample_rate = changes.rate + if changes.capture_all: sample_rate = 100 with pg_client.PostgresClient() as cur: query = cur.mogrify("""UPDATE public.projects SET sample_rate= %(sample_rate)s - WHERE project_id =%(project_id)s + WHERE project_id =%(project_id)s AND deleted_at ISNULL;""", {"project_id": project_id, "sample_rate": sample_rate}) cur.execute(query=query) @@ -280,30 +298,6 @@ def get_projects_ids(tenant_id): return [r["project_id"] for r in rows] -def get_project_by_key(tenant_id, project_key, include_last_session=False, include_gdpr=None): - with pg_client.PostgresClient() as cur: - extra_select = "" - if include_last_session: - extra_select += """,(SELECT max(ss.start_ts) - FROM public.sessions AS ss - WHERE ss.project_key = %(project_key)s) AS last_recorded_session_at""" - if include_gdpr: - extra_select += ",s.gdpr" - query = cur.mogrify(f"""SELECT s.project_key, - s.name - {extra_select} - FROM public.projects AS s - WHERE s.project_key =%(project_key)s - AND s.tenant_id =%(tenant_id)s - AND s.deleted_at IS NULL - LIMIT 1;""", - {"project_key": 
project_key, "tenant_id": tenant_id}) - - cur.execute(query=query) - row = cur.fetchone() - return helper.dict_to_camel_case(row) - - def is_authorized(project_id, tenant_id, user_id=None): if project_id is None or not str(project_id).isdigit(): return False diff --git a/ee/api/chalicelib/core/roles.py b/ee/api/chalicelib/core/roles.py index 077bb2464..90035ebe8 100644 --- a/ee/api/chalicelib/core/roles.py +++ b/ee/api/chalicelib/core/roles.py @@ -1,8 +1,7 @@ from typing import Optional from fastapi import HTTPException, status - -import schemas_ee +import schemas from chalicelib.core import users, projects from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC @@ -22,7 +21,7 @@ def __exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int]) -> bo return row["exists"] -def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema): +def update(tenant_id, user_id, role_id, data: schemas.RolePayloadSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: @@ -57,7 +56,7 @@ def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema): RETURNING *, COALESCE((SELECT ARRAY_AGG(project_id) FROM roles_projects WHERE roles_projects.role_id=%(role_id)s),'{}') AS projects;""", - {"tenant_id": tenant_id, "role_id": role_id, **data.dict()}) + {"tenant_id": tenant_id, "role_id": role_id, **data.model_dump()}) cur.execute(query=query) row = cur.fetchone() row["created_at"] = TimeUTC.datetime_to_timestamp(row["created_at"]) @@ -80,7 +79,7 @@ def update(tenant_id, user_id, role_id, data: schemas_ee.RolePayloadSchema): return helper.dict_to_camel_case(row) -def create(tenant_id, user_id, data: schemas_ee.RolePayloadSchema): +def create(tenant_id, user_id, data: schemas.RolePayloadSchema): admin = users.get(user_id=user_id, tenant_id=tenant_id) if not admin["admin"] and not admin["superAdmin"]: diff --git a/ee/api/chalicelib/core/sessions_devtool.py 
b/ee/api/chalicelib/core/sessions_devtool.py index 82d8e8564..e0b7e5fae 100644 --- a/ee/api/chalicelib/core/sessions_devtool.py +++ b/ee/api/chalicelib/core/sessions_devtool.py @@ -1,11 +1,11 @@ from decouple import config from fastapi.security import SecurityScopes -import schemas_ee +import schemas from chalicelib.core import permissions from chalicelib.utils.storage import StorageClient -SCOPES = SecurityScopes([schemas_ee.Permissions.dev_tools]) +SCOPES = SecurityScopes([schemas.Permissions.dev_tools]) def __get_devtools_keys(project_id, session_id): @@ -18,7 +18,7 @@ def __get_devtools_keys(project_id, session_id): ] -def get_urls(session_id, project_id, context: schemas_ee.CurrentContext, check_existence: bool = True): +def get_urls(session_id, project_id, context: schemas.CurrentContext, check_existence: bool = True): if not permissions.check(security_scopes=SCOPES, context=context): return [] results = [] diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 08e572740..8a2425398 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -2,7 +2,7 @@ import ast from typing import List, Union import schemas -import schemas_ee +import schemas from chalicelib.core import events, metadata, projects, performance_event, metrics from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper @@ -246,7 +246,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ else: for i in range(len(sessions)): sessions[i]["metadata"] = ast.literal_eval(sessions[i]["metadata"]) - sessions[i] = schemas_ee.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i])) + sessions[i] = schemas.SessionModel.parse_obj(helper.dict_to_camel_case(sessions[i])) # if not data.group_by_user and data.sort is not None and data.sort != "session_id": # sessions = sorted(sessions, key=lambda s: s[helper.key_to_snake_case(data.sort)], @@ -260,12 +260,12 @@ 
def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, density: int, view_type: schemas.MetricTimeseriesViewType, metric_type: schemas.MetricType, metric_of: schemas.MetricOfTable, metric_value: List): - step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endDate, startTimestamp=data.startDate, + step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, density=density)) extra_event = None if metric_of == schemas.MetricOfTable.visited_url: extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path - FROM {exp_ch_helper.get_main_events_table(data.startDate)} AS ev + FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev WHERE ev.datetime >= toDateTime(%(startDate)s / 1000) AND ev.datetime <= toDateTime(%(endDate)s / 1000) AND ev.project_id = %(project_id)s @@ -300,7 +300,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d # print("--------------------") sessions = cur.execute(main_query) if view_type == schemas.MetricTimeseriesViewType.line_chart: - sessions = metrics.__complete_missing_steps(start_time=data.startDate, end_time=data.endDate, + sessions = metrics.__complete_missing_steps(start_time=data.startTimestamp, end_time=data.endTimestamp, density=density, neutral={"count": 0}, rows=sessions) else: sessions = sessions[0]["count"] if len(sessions) > 0 else 0 @@ -362,7 +362,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d return sessions -def __is_valid_event(is_any: bool, event: schemas._SessionSearchEventSchema): +def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2): return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, schemas.EventType.graphql] \ or event.type in [schemas.PerformanceEventType.location_dom_complete, @@ 
-402,11 +402,11 @@ def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEve def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_status, errors_only, favorite_only, issue, project_id, user_id, extra_event=None): ss_constraints = [] - full_args = {"project_id": project_id, "startDate": data.startDate, "endDate": data.endDate, + full_args = {"project_id": project_id, "startDate": data.startTimestamp, "endDate": data.endTimestamp, "projectId": project_id, "userId": user_id} - MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startDate) - MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startDate) + MAIN_EVENTS_TABLE = exp_ch_helper.get_main_events_table(data.startTimestamp) + MAIN_SESSIONS_TABLE = exp_ch_helper.get_main_sessions_table(data.startTimestamp) full_args["MAIN_EVENTS_TABLE"] = MAIN_EVENTS_TABLE full_args["MAIN_SESSIONS_TABLE"] = MAIN_SESSIONS_TABLE @@ -1224,9 +1224,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu else: data.events = [] # --------------------------------------------------------------------------- - if data.startDate is not None: + if data.startTimestamp is not None: extra_constraints.append("s.datetime >= toDateTime(%(startDate)s/1000)") - if data.endDate is not None: + if data.endTimestamp is not None: extra_constraints.append("s.datetime <= toDateTime(%(endDate)s/1000)") # if data.platform is not None: # if data.platform == schemas.PlatformType.mobile: diff --git a/ee/api/chalicelib/core/sessions_favorite.py b/ee/api/chalicelib/core/sessions_favorite.py index 8d8f7043b..e7722b54e 100644 --- a/ee/api/chalicelib/core/sessions_favorite.py +++ b/ee/api/chalicelib/core/sessions_favorite.py @@ -1,12 +1,12 @@ from decouple import config -import schemas_ee +import schemas from chalicelib.core import sessions, sessions_favorite_exp, sessions_mobs, sessions_devtool from chalicelib.utils import pg_client from chalicelib.utils.storage import 
extra -def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): +def add_favorite_session(context: schemas.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -22,7 +22,7 @@ def add_favorite_session(context: schemas_ee.CurrentContext, project_id, session return {"errors": ["something went wrong"]} -def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): +def remove_favorite_session(context: schemas.CurrentContext, project_id, session_id): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify(f"""\ @@ -39,7 +39,7 @@ def remove_favorite_session(context: schemas_ee.CurrentContext, project_id, sess return {"errors": ["something went wrong"]} -def favorite_session(context: schemas_ee.CurrentContext, project_id, session_id): +def favorite_session(context: schemas.CurrentContext, project_id, session_id): keys = sessions_mobs.__get_mob_keys(project_id=project_id, session_id=session_id) keys += sessions_mobs.__get_mob_keys_deprecated(session_id=session_id) # To support old sessions keys += sessions_devtool.__get_devtools_keys(project_id=project_id, session_id=session_id) diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py index aced9b802..46e348f19 100644 --- a/ee/api/chalicelib/core/sessions_insights.py +++ b/ee/api/chalicelib/core/sessions_insights.py @@ -1,7 +1,7 @@ from typing import Optional import schemas -import schemas_ee +import schemas from chalicelib.core import metrics from chalicelib.utils import ch_client @@ -161,7 +161,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional for n in names_: if n is None: continue - data_ = {'category': schemas_ee.InsightCategories.network, 'name': n, + data_ = {'category': schemas.InsightCategories.network, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in 
ratio: if n == n_: @@ -266,7 +266,7 @@ def query_most_errors_by_period(project_id, start_time, end_time, for n in names_: if n is None: continue - data_ = {'category': schemas_ee.InsightCategories.errors, 'name': n, + data_ = {'category': schemas.InsightCategories.errors, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in ratio: if n == n_: @@ -346,7 +346,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time, output = list() if cpu_oldvalue is not None or cpu_newvalue is not None: - output.append({'category': schemas_ee.InsightCategories.resources, + output.append({'category': schemas.InsightCategories.resources, 'name': 'cpu', 'value': cpu_newvalue, 'oldValue': cpu_oldvalue, @@ -354,7 +354,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time, cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio, 'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False}) if mem_oldvalue is not None or mem_newvalue is not None: - output.append({'category': schemas_ee.InsightCategories.resources, + output.append({'category': schemas.InsightCategories.resources, 'name': 'memory', 'value': mem_newvalue, 'oldValue': mem_oldvalue, @@ -434,7 +434,7 @@ def query_click_rage_by_period(project_id, start_time, end_time, for n in names_: if n is None: continue - data_ = {'category': schemas_ee.InsightCategories.rage, 'name': n, + data_ = {'category': schemas.InsightCategories.rage, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in ratio: if n == n_: @@ -453,26 +453,26 @@ def query_click_rage_by_period(project_id, start_time, end_time, return results -def fetch_selected(project_id, data: schemas_ee.GetInsightsSchema): +def fetch_selected(project_id, data: schemas.GetInsightsSchema): output = list() if data.metricValue is None or len(data.metricValue) == 0: data.metricValue = [] - for v in 
schemas_ee.InsightCategories: + for v in schemas.InsightCategories: data.metricValue.append(v) filters = None if len(data.series) > 0: filters = data.series[0].filter - if schemas_ee.InsightCategories.errors in data.metricValue: + if schemas.InsightCategories.errors in data.metricValue: output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas_ee.InsightCategories.network in data.metricValue: + if schemas.InsightCategories.network in data.metricValue: output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas_ee.InsightCategories.rage in data.metricValue: + if schemas.InsightCategories.rage in data.metricValue: output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas_ee.InsightCategories.resources in data.metricValue: + if schemas.InsightCategories.resources in data.metricValue: output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) return output diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 0e58d276b..3d7340fc4 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -88,7 +88,8 @@ def create(tenant_id, user_id, project_id, session_id, data: schemas.SessionNote query = cur.mogrify(f"""INSERT INTO public.sessions_notes (message, user_id, tag, session_id, project_id, timestamp, is_public) VALUES (%(message)s, %(user_id)s, %(tag)s, %(session_id)s, %(project_id)s, %(timestamp)s, %(is_public)s) RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""", - {"user_id": user_id, "project_id": project_id, "session_id": session_id, **data.dict(), + {"user_id": 
user_id, "project_id": project_id, "session_id": session_id, + **data.model_dump(), "tenant_id": tenant_id}) cur.execute(query) result = helper.dict_to_camel_case(cur.fetchone()) @@ -118,7 +119,7 @@ def edit(tenant_id, user_id, project_id, note_id, data: schemas.SessionUpdateNot AND note_id = %(note_id)s AND deleted_at ISNULL RETURNING *,(SELECT name FROM users WHERE users.user_id=%(user_id)s AND users.tenant_id=%(tenant_id)s) AS user_name;""", - {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.dict(), + {"project_id": project_id, "user_id": user_id, "note_id": note_id, **data.model_dump(), "tenant_id": tenant_id}) ) row = helper.dict_to_camel_case(cur.fetchone()) diff --git a/ee/api/chalicelib/core/sessions_replay.py b/ee/api/chalicelib/core/sessions_replay.py index 1ba4242ad..303d8b594 100644 --- a/ee/api/chalicelib/core/sessions_replay.py +++ b/ee/api/chalicelib/core/sessions_replay.py @@ -1,5 +1,5 @@ import schemas -import schemas_ee +import schemas from chalicelib.core import events, metadata, events_ios, \ sessions_mobs, issues, resources, assist, sessions_devtool, sessions_notes from chalicelib.utils import errors_helper @@ -17,7 +17,7 @@ def __group_metadata(session, project_metadata): # for backward compatibility # This function should not use Clickhouse because it doesn't have `file_key` -def get_by_id2_pg(project_id, session_id, context: schemas_ee.CurrentContext, full_data=False, +def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_data=False, include_fav_viewed=False, group_metadata=False, live=True): with pg_client.PostgresClient() as cur: extra_query = [] diff --git a/ee/api/chalicelib/core/signals.py b/ee/api/chalicelib/core/signals.py index 72822d0af..37bb95b44 100644 --- a/ee/api/chalicelib/core/signals.py +++ b/ee/api/chalicelib/core/signals.py @@ -1,9 +1,9 @@ -import schemas_ee +import schemas import logging from chalicelib.utils import events_queue -def 
handle_frontend_signals_queued(project_id: int, user_id: int, data: schemas_ee.SignalsSchema): +def handle_frontend_signals_queued(project_id: int, user_id: int, data: schemas.SignalsSchema): try: events_queue.global_queue.put((project_id, user_id, data)) return {'data': 'insertion succeded'} diff --git a/ee/api/chalicelib/core/significance.py b/ee/api/chalicelib/core/significance.py index 52650bfd7..003fea244 100644 --- a/ee/api/chalicelib/core/significance.py +++ b/ee/api/chalicelib/core/significance.py @@ -30,17 +30,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} -def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: +def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]: """ Add minimal timestamp :param filter_d: dict contains events&filters&... :return: """ - stages: [dict] = filter_d.get("events", []) - filters: [dict] = filter_d.get("filters", []) - filter_issues = filter_d.get("issueTypes") - if filter_issues is None or len(filter_issues) == 0: - filter_issues = [] + stages: [dict] = filter_d.events + filters: [dict] = filter_d.filters + filter_issues = [] + # TODO: enable this if needed by an endpoint + # filter_issues = filter_d.get("issueTypes") + # if filter_issues is None or len(filter_issues) == 0: + # filter_issues = [] stage_constraints = ["main.timestamp <= %(endTimestamp)s"] first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s", "s.start_ts <= %(endTimestamp)s"] @@ -126,22 +128,22 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: i = -1 for s in stages: - if s.get("operator") is None: - s["operator"] = "is" + if s.operator is None: + s.operator = schemas.SearchEventOperator._is - if not isinstance(s["value"], list): - s["value"] = [s["value"]] - is_any = 
sh.isAny_opreator(s["operator"]) - if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0: + if not isinstance(s.value, list): + s.value = [s.value] + is_any = sh.isAny_opreator(s.operator) + if not is_any and isinstance(s.value, list) and len(s.value) == 0: continue i += 1 if i == 0: extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] else: extra_from = [] - op = sh.get_sql_operator(s["operator"]) + op = sh.get_sql_operator(s.operator) # event_type = s["type"].upper() - event_type = s["type"] + event_type = s.type if event_type == events.EventType.CLICK.ui_type: next_table = events.EventType.CLICK.table next_col_name = events.EventType.CLICK.column @@ -171,16 +173,16 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print(f"=================UNDEFINED:{event_type}") continue - values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), + values = {**values, **sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator), value_key=f"value{i + 1}")} - if sh.is_negation_operator(s["operator"]) and i > 0: + if sh.is_negation_operator(s.operator) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id FROM {next_table} AS s_main WHERE {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}")} + values=s.value, value_key=f"value{i + 1}")} AND s_main.timestamp >= T{i}.stage{i}_timestamp AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""") else: @@ -188,7 +190,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s.value, value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT 
main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -231,7 +233,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: """ # LIMIT 10000 - params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"], + params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp, + "endTimestamp": filter_d.endTimestamp, "issueTypes": tuple(filter_issues), **values} with pg_client.PostgresClient() as cur: query = cur.mogrify(n_stages_query, params) @@ -245,7 +248,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------") print(query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(filter_d) + print(filter_d.model_dump_json()) print("--------------------") raise err return rows @@ -550,9 +553,9 @@ def get_issues(stages, rows, first_stage=None, last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -def get_top_insights(filter_d, project_id): +def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): output = [] - stages = filter_d.get("events", []) + stages = filter_d.events # TODO: handle 1 stage alone if len(stages) == 0: print("no stages found") @@ -560,17 +563,24 @@ def get_top_insights(filter_d, project_id): elif len(stages) == 1: # TODO: count sessions, and users for single stage output = [{ - "type": stages[0]["type"], - "value": stages[0]["value"], + "type": stages[0].type, + "value": stages[0].value, "dropPercentage": None, - "operator": stages[0]["operator"], + "operator": stages[0].operator, "sessionsCount": 0, "dropPct": 0, "usersCount": 0, "dropDueToIssues": 0 }] - counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), + # original + # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), 
+ # project_id=project_id, user_id=None, count_only=True) + # first change + # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d), + # project_id=project_id, user_id=None, count_only=True) + # last change + counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), project_id=project_id, user_id=None, count_only=True) output[0]["sessionsCount"] = counts["countSessions"] output[0]["usersCount"] = counts["countUsers"] @@ -589,9 +599,9 @@ def get_top_insights(filter_d, project_id): return stages_list, total_drop_due_to_issues -def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None): +def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None): output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) - stages = filter_d.get("events", []) + stages = filter_d.events # The result of the multi-stage query rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) # print(json.dumps(rows[0],indent=4)) diff --git a/ee/api/chalicelib/core/significance_exp.py b/ee/api/chalicelib/core/significance_exp.py index 52650bfd7..df8805961 100644 --- a/ee/api/chalicelib/core/significance_exp.py +++ b/ee/api/chalicelib/core/significance_exp.py @@ -1,4 +1,3 @@ -__author__ = "AZNAUROV David" __maintainer__ = "KRAIEM Taha Yassine" from decouple import config @@ -30,17 +29,19 @@ T_VALUES = {1: 12.706, 2: 4.303, 3: 3.182, 4: 2.776, 5: 2.571, 6: 2.447, 7: 2.36 21: 2.080, 22: 2.074, 23: 2.069, 25: 2.064, 26: 2.060, 27: 2.056, 28: 2.052, 29: 2.045, 30: 2.042} -def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: +def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) -> List[RealDictRow]: """ Add minimal timestamp :param filter_d: dict contains events&filters&... 
:return: """ - stages: [dict] = filter_d.get("events", []) - filters: [dict] = filter_d.get("filters", []) - filter_issues = filter_d.get("issueTypes") - if filter_issues is None or len(filter_issues) == 0: - filter_issues = [] + stages: [dict] = filter_d.events + filters: [dict] = filter_d.filters + filter_issues = [] + # TODO: enable this if needed by an endpoint + # filter_issues = filter_d.get("issueTypes") + # if filter_issues is None or len(filter_issues) == 0: + # filter_issues = [] stage_constraints = ["main.timestamp <= %(endTimestamp)s"] first_stage_extra_constraints = ["s.project_id=%(project_id)s", "s.start_ts >= %(startTimestamp)s", "s.start_ts <= %(endTimestamp)s"] @@ -126,22 +127,22 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: i = -1 for s in stages: - if s.get("operator") is None: - s["operator"] = "is" + if s.operator is None: + s.operator = schemas.SearchEventOperator._is - if not isinstance(s["value"], list): - s["value"] = [s["value"]] - is_any = sh.isAny_opreator(s["operator"]) - if not is_any and isinstance(s["value"], list) and len(s["value"]) == 0: + if not isinstance(s.value, list): + s.value = [s.value] + is_any = sh.isAny_opreator(s.operator) + if not is_any and isinstance(s.value, list) and len(s.value) == 0: continue i += 1 if i == 0: extra_from = filter_extra_from + ["INNER JOIN public.sessions AS s USING (session_id)"] else: extra_from = [] - op = sh.get_sql_operator(s["operator"]) + op = sh.get_sql_operator(s.operator) # event_type = s["type"].upper() - event_type = s["type"] + event_type = s.type if event_type == events.EventType.CLICK.ui_type: next_table = events.EventType.CLICK.table next_col_name = events.EventType.CLICK.column @@ -171,16 +172,16 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print(f"=================UNDEFINED:{event_type}") continue - values = {**values, **sh.multi_values(helper.values_for_operator(value=s["value"], op=s["operator"]), + values = {**values, 
**sh.multi_values(helper.values_for_operator(value=s.value, op=s.operator), value_key=f"value{i + 1}")} - if sh.is_negation_operator(s["operator"]) and i > 0: + if sh.is_negation_operator(s.operator) and i > 0: op = sh.reverse_sql_operator(op) main_condition = "left_not.session_id ISNULL" extra_from.append(f"""LEFT JOIN LATERAL (SELECT session_id FROM {next_table} AS s_main WHERE {sh.multi_conditions(f"s_main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}")} + values=s.value, value_key=f"value{i + 1}")} AND s_main.timestamp >= T{i}.stage{i}_timestamp AND s_main.session_id = T1.session_id) AS left_not ON (TRUE)""") else: @@ -188,7 +189,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: main_condition = "TRUE" else: main_condition = sh.multi_conditions(f"main.{next_col_name} {op} %(value{i + 1})s", - values=s["value"], value_key=f"value{i + 1}") + values=s.value, value_key=f"value{i + 1}") n_stages_query.append(f""" (SELECT main.session_id, {"MIN(main.timestamp)" if i + 1 < len(stages) else "MAX(main.timestamp)"} AS stage{i + 1}_timestamp @@ -231,7 +232,8 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: """ # LIMIT 10000 - params = {"project_id": project_id, "startTimestamp": filter_d["startDate"], "endTimestamp": filter_d["endDate"], + params = {"project_id": project_id, "startTimestamp": filter_d.startTimestamp, + "endTimestamp": filter_d.endTimestamp, "issueTypes": tuple(filter_issues), **values} with pg_client.PostgresClient() as cur: query = cur.mogrify(n_stages_query, params) @@ -245,7 +247,7 @@ def get_stages_and_events(filter_d, project_id) -> List[RealDictRow]: print("--------- FUNNEL SEARCH QUERY EXCEPTION -----------") print(query.decode('UTF-8')) print("--------- PAYLOAD -----------") - print(filter_d) + print(filter_d.model_dump_json()) print("--------------------") raise err return rows @@ -550,9 +552,9 @@ def get_issues(stages, rows, first_stage=None, 
last_stage=None, drop_only=False) return n_critical_issues, issues_dict, total_drop_due_to_issues -def get_top_insights(filter_d, project_id): +def get_top_insights(filter_d: schemas.CardSeriesFilterSchema, project_id): output = [] - stages = filter_d.get("events", []) + stages = filter_d.events # TODO: handle 1 stage alone if len(stages) == 0: print("no stages found") @@ -560,17 +562,24 @@ def get_top_insights(filter_d, project_id): elif len(stages) == 1: # TODO: count sessions, and users for single stage output = [{ - "type": stages[0]["type"], - "value": stages[0]["value"], + "type": stages[0].type, + "value": stages[0].value, "dropPercentage": None, - "operator": stages[0]["operator"], + "operator": stages[0].operator, "sessionsCount": 0, "dropPct": 0, "usersCount": 0, "dropDueToIssues": 0 }] - counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), + # original + # counts = sessions.search_sessions(data=schemas.SessionsSearchCountSchema.parse_obj(filter_d), + # project_id=project_id, user_id=None, count_only=True) + # first change + # counts = sessions.search_sessions(data=schemas.FlatSessionsSearchPayloadSchema.parse_obj(filter_d), + # project_id=project_id, user_id=None, count_only=True) + # last change + counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.model_validate(filter_d), project_id=project_id, user_id=None, count_only=True) output[0]["sessionsCount"] = counts["countSessions"] output[0]["usersCount"] = counts["countUsers"] @@ -589,9 +598,9 @@ def get_top_insights(filter_d, project_id): return stages_list, total_drop_due_to_issues -def get_issues_list(filter_d, project_id, first_stage=None, last_stage=None): +def get_issues_list(filter_d: schemas.CardSeriesFilterSchema, project_id, first_stage=None, last_stage=None): output = dict({"total_drop_due_to_issues": 0, "critical_issues_count": 0, "significant": [], "insignificant": []}) - stages = filter_d.get("events", []) + stages = 
filter_d.events # The result of the multi-stage query rows = get_stages_and_events(filter_d=filter_d, project_id=project_id) # print(json.dumps(rows[0],indent=4)) diff --git a/ee/api/chalicelib/core/signup.py b/ee/api/chalicelib/core/signup.py index 1344b5e7b..08fd149d1 100644 --- a/ee/api/chalicelib/core/signup.py +++ b/ee/api/chalicelib/core/signup.py @@ -3,7 +3,6 @@ import json from decouple import config import schemas -import schemas_ee from chalicelib.core import users, telemetry, tenants from chalicelib.utils import captcha from chalicelib.utils import helper @@ -19,7 +18,7 @@ def create_tenant(data: schemas.UserSignupSchema): email = data.email print(f"=====================> {email}") - password = data.password + password = data.password.get_secret_value() if email is None or len(email) < 5: errors.append("Invalid email address.") @@ -52,7 +51,7 @@ def create_tenant(data: schemas.UserSignupSchema): params = { "email": email, "password": password, "fullname": fullname, "projectName": project_name, "data": json.dumps({"lastAnnouncementView": TimeUTC.now()}), "organizationName": organization_name, - "permissions": [p.value for p in schemas_ee.Permissions] + "permissions": [p.value for p in schemas.Permissions] } query = """WITH t AS ( INSERT INTO public.tenants (name) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 7c31f8283..f968183ff 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -10,7 +10,7 @@ from starlette.background import BackgroundTask import app as main_app import schemas -import schemas_ee +import schemas from chalicelib.utils import pg_client, helper from chalicelib.utils.TimeUTC import TimeUTC from schemas import CurrentContext @@ -64,7 +64,7 @@ class TraceSchema(BaseModel): def __process_trace(trace: TraceSchema): - data = trace.dict() + data = trace.model_dump() data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len( 
trace.parameters.keys()) > 0 else None data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None @@ -158,7 +158,7 @@ async def process_traces_queue(): await write_traces_batch(traces) -def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): +def get_all(tenant_id, data: schemas.TrailSearchPayloadSchema): with pg_client.PostgresClient() as cur: conditions = ["traces.tenant_id=%(tenant_id)s", "traces.created_at>=%(startDate)s", @@ -168,7 +168,7 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): "endDate": data.endDate, "p_start": (data.page - 1) * data.limit, "p_end": data.page * data.limit, - **data.dict()} + **data.model_dump()} if data.user_id is not None: conditions.append("user_id=%(user_id)s") if data.action is not None: @@ -184,10 +184,10 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): COALESCE(JSONB_AGG(full_traces ORDER BY rn) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions FROM (SELECT traces.*,users.email,users.name AS username, - ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order.value}) AS rn + ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn FROM traces LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY traces.created_at {data.order.value}) AS full_traces;""", params) + ORDER BY traces.created_at {data.order}) AS full_traces;""", params) ) rows = cur.fetchone() return helper.dict_to_camel_case(rows) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 89fe2e354..319300a87 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -2,15 +2,17 @@ import json import secrets from decouple import config -from fastapi import BackgroundTasks +from fastapi import BackgroundTasks, HTTPException +from starlette import status import schemas -import schemas_ee -from chalicelib.core import authorizers, metadata, projects, 
roles +from chalicelib.core import authorizers, metadata, projects from chalicelib.core import tenants, assist -from chalicelib.utils import helper, email_helper, smtp +from chalicelib.utils import email_helper, smtp +from chalicelib.utils import helper from chalicelib.utils import pg_client from chalicelib.utils.TimeUTC import TimeUTC +from chalicelib.core import roles def __generate_invitation_token(): @@ -210,33 +212,33 @@ def update(tenant_id, user_id, changes, output=True): return get(user_id=user_id, tenant_id=tenant_id) -def create_member(tenant_id, user_id, data, background_tasks: BackgroundTasks): +def create_member(tenant_id, user_id, data: schemas.CreateMemberSchema, background_tasks: BackgroundTasks): admin = get(tenant_id=tenant_id, user_id=user_id) if not admin["admin"] and not admin["superAdmin"]: return {"errors": ["unauthorized"]} - if data.get("userId") is not None: + if data.user_id is not None: return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]} - user = get_by_email_only(email=data["email"]) + user = get_by_email_only(email=data.email) if user: return {"errors": ["user already exists"]} - name = data.get("name", None) - if name is None or len(name) == 0: - name = data["email"] + + if data.name is None or len(data.name) == 0: + data.name = data.email role_id = data.get("roleId") if role_id is None: role_id = roles.get_role_by_name(tenant_id=tenant_id, name="member").get("roleId") invitation_token = __generate_invitation_token() - user = get_deleted_user_by_email(email=data["email"]) + user = get_deleted_user_by_email(email=data.email) if user is not None and user["tenantId"] == tenant_id: - new_member = restore_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name, user_id=user["userId"], role_id=role_id) + new_member = restore_member(tenant_id=tenant_id, email=data.email, invitation_token=invitation_token, + admin=data.admin, name=data.name, 
user_id=user["userId"], role_id=role_id) elif user is not None: __hard_delete_user(user_id=user["userId"]) new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name, role_id=role_id) + admin=data.get("admin", False), name=data.name, role_id=role_id) else: new_member = create_new_member(tenant_id=tenant_id, email=data["email"], invitation_token=invitation_token, - admin=data.get("admin", False), name=name, role_id=role_id) + admin=data.get("admin", False), name=data.name, role_id=role_id) new_member["invitationLink"] = __get_invitation_link(new_member.pop("invitationToken")) background_tasks.add_task(email_helper.send_team_invitation, **{ "recipient": data["email"], @@ -282,7 +284,8 @@ def get(user_id, tenant_id): roles.name AS role_name, roles.permissions, roles.all_projects, - basic_authentication.password IS NOT NULL AS has_password + basic_authentication.password IS NOT NULL AS has_password, + users.service_account FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) WHERE @@ -351,7 +354,7 @@ def edit_account(user_id, tenant_id, changes: schemas.EditAccountSchema): return {"data": __get_account_info(tenant_id=tenant_id, user_id=user_id)} -def edit_member(user_id_to_update, tenant_id, changes: schemas_ee.EditMemberSchema, editor_id): +def edit_member(user_id_to_update, tenant_id, changes: schemas.EditMemberSchema, editor_id): user = get_member(user_id=user_id_to_update, tenant_id=tenant_id) _changes = {} if editor_id != user_id_to_update: @@ -472,7 +475,9 @@ def get_members(tenant_id): FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id LEFT JOIN public.roles USING (role_id) - WHERE users.tenant_id = %(tenant_id)s AND users.deleted_at IS NULL + WHERE users.tenant_id = %(tenant_id)s + AND users.deleted_at IS NULL + AND NOT 
users.service_account ORDER BY name, user_id""", {"tenant_id": tenant_id}) ) @@ -626,17 +631,24 @@ def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud): with pg_client.PostgresClient() as cur: cur.execute( cur.mogrify( - f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND tenant_id = %(tenant_id)s AND deleted_at IS NULL LIMIT 1;", + f"""SELECT user_id, + jwt_iat, + changed_at, + service_account, + basic_authentication.user_id IS NOT NULL AS has_basic_auth + FROM public.users + LEFT JOIN public.basic_authentication USING(user_id) + WHERE user_id = %(userId)s + AND tenant_id = %(tenant_id)s + AND deleted_at IS NULL + LIMIT 1;""", {"userId": user_id, "tenant_id": tenant_id}) ) r = cur.fetchone() return r is not None \ - and r.get("jwt_iat") is not None \ - and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \ - or (jwt_aud.startswith("plugin") \ - and (r["changed_at"] is None \ - or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000))) - ) + and (r["service_account"] and not r["has_basic_auth"] + or r.get("jwt_iat") is not None \ + and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1)) def change_jwt_iat(user_id): @@ -665,7 +677,8 @@ def authenticate(email, password, for_change_password=False) -> dict | None: users.origin, users.role_id, roles.name AS role_name, - roles.permissions + roles.permissions, + users.service_account FROM public.users AS users INNER JOIN public.basic_authentication USING(user_id) LEFT JOIN public.roles ON (roles.role_id = users.role_id AND roles.tenant_id = users.tenant_id) WHERE users.email = %(email)s @@ -694,7 +707,10 @@ def authenticate(email, password, for_change_password=False) -> dict | None: if for_change_password: return True r = helper.dict_to_camel_case(r) - if config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available(): + if r["serviceAccount"]: + raise 
HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, + detail="service account is not authorized to login") + elif config("enforce_SSO", cast=bool, default=False) and helper.is_saml2_available(): return {"errors": ["must sign-in with SSO, enforced by admin"]} jwt_iat = change_jwt_iat(r['userId']) @@ -710,33 +726,27 @@ def authenticate(email, password, for_change_password=False) -> dict | None: return None -def authenticate_sso(email, internal_id, exp=None): +def get_user_role(tenant_id, user_id): with pg_client.PostgresClient() as cur: - query = cur.mogrify( - f"""SELECT - users.user_id, - users.tenant_id, - users.role, - users.name, - (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, - (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, - origin, - role_id - FROM public.users AS users - WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""", - {"email": email, "internal_id": internal_id}) - - cur.execute(query) - r = cur.fetchone() - - if r is not None: - r = helper.dict_to_camel_case(r) - jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) - return authorizers.generate_jwt(r['userId'], r['tenantId'], - iat=jwt_iat, aud=f"front:{helper.get_stage_name()}", - exp=(exp + jwt_iat // 1000) if exp is not None else None) - return None + cur.execute( + cur.mogrify( + f"""SELECT + users.user_id, + users.email, + users.role, + users.name, + users.created_at, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member + FROM public.users + WHERE users.deleted_at IS NULL + AND users.user_id=%(user_id)s + AND users.tenant_id=%(tenant_id)s + LIMIT 1""", + {"tenant_id": tenant_id, "user_id": user_id}) + ) + return helper.dict_to_camel_case(cur.fetchone()) def 
create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id=None): @@ -772,6 +782,48 @@ def create_sso_user(tenant_id, email, admin, name, origin, role_id, internal_id= return helper.dict_to_camel_case(cur.fetchone()) +def __hard_delete_user(user_id): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""DELETE FROM public.users + WHERE users.user_id = %(user_id)s AND users.deleted_at IS NOT NULL ;""", + {"user_id": user_id}) + cur.execute(query) + + +def authenticate_sso(email, internal_id, exp=None): + with pg_client.PostgresClient() as cur: + query = cur.mogrify( + f"""SELECT + users.user_id, + users.tenant_id, + users.role, + users.name, + (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, + (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, + (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member, + origin, + role_id, + service_account + FROM public.users AS users + WHERE users.email = %(email)s AND internal_id = %(internal_id)s;""", + {"email": email, "internal_id": internal_id}) + + cur.execute(query) + r = cur.fetchone() + + if r is not None: + r = helper.dict_to_camel_case(r) + if r["serviceAccount"]: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, + detail="service account is not authorized to login") + jwt_iat = TimeUTC.datetime_to_timestamp(change_jwt_iat(r['userId'])) + return authorizers.generate_jwt(r['userId'], r['tenantId'], + iat=jwt_iat, aud=f"front:{helper.get_stage_name()}", + exp=(exp + jwt_iat // 1000) if exp is not None else None) + return None + + def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, internal_id=None): with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""\ @@ -822,35 +874,3 @@ def restore_sso_user(user_id, tenant_id, email, admin, name, origin, role_id, in query ) return helper.dict_to_camel_case(cur.fetchone()) - - -def __hard_delete_user(user_id): - with pg_client.PostgresClient() as 
cur: - query = cur.mogrify( - f"""DELETE FROM public.users - WHERE users.user_id = %(user_id)s AND users.deleted_at IS NOT NULL ;""", - {"user_id": user_id}) - cur.execute(query) - - -def get_user_role(tenant_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""SELECT - users.user_id, - users.email, - users.role, - users.name, - users.created_at, - (CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin, - (CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin, - (CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member - FROM public.users - WHERE users.deleted_at IS NULL - AND users.user_id=%(user_id)s - AND users.tenant_id=%(tenant_id)s - LIMIT 1""", - {"tenant_id": tenant_id, "user_id": user_id}) - ) - return helper.dict_to_camel_case(cur.fetchone()) diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py index 55405cc0d..9c993a059 100644 --- a/ee/api/chalicelib/core/webhook.py +++ b/ee/api/chalicelib/core/webhook.py @@ -83,6 +83,8 @@ def update(tenant_id, webhook_id, changes, replace_none=False): {"tenant_id": tenant_id, "id": webhook_id, **changes}) ) w = helper.dict_to_camel_case(cur.fetchone()) + if w is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"webhook not found.") w["createdAt"] = TimeUTC.datetime_to_timestamp(w["createdAt"]) if replace_none: for k in w.keys(): @@ -128,20 +130,22 @@ def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int], return row["exists"] -def add_edit(tenant_id, data, replace_none=None): - if "name" in data and len(data["name"]) > 0 \ - and exists_by_name(name=data["name"], exclude_id=data.get("webhookId"), tenant_id=tenant_id): +def add_edit(tenant_id, data: schemas.WebhookSchema, replace_none=None): + if len(data.name) > 0 \ + and exists_by_name(name=data.name, exclude_id=data.webhook_id): raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already 
exists.") - if data.get("webhookId") is not None: - return update(tenant_id=tenant_id, webhook_id=data["webhookId"], - changes={"endpoint": data["endpoint"], - "authHeader": None if "authHeader" not in data else data["authHeader"], - "name": data["name"] if "name" in data else ""}, replace_none=replace_none) + if data.webhook_id is not None: + return update(tenant_id=tenant_id, webhook_id=data.webhook_id, + changes={"endpoint": data.endpoint, + "authHeader": data.auth_header, + "name": data.name}, + replace_none=replace_none) else: return add(tenant_id=tenant_id, - endpoint=data["endpoint"], - auth_header=None if "authHeader" not in data else data["authHeader"], - name=data["name"] if "name" in data else "", replace_none=replace_none) + endpoint=data.endpoint, + auth_header=data.auth_header, + name=data.name, + replace_none=replace_none) def delete(tenant_id, webhook_id): diff --git a/ee/api/chalicelib/utils/SAML2_helper.py b/ee/api/chalicelib/utils/SAML2_helper.py index e01c30509..6ef8ae942 100644 --- a/ee/api/chalicelib/utils/SAML2_helper.py +++ b/ee/api/chalicelib/utils/SAML2_helper.py @@ -5,9 +5,11 @@ from urllib.parse import urlparse from decouple import config from fastapi import Request -from onelogin.saml2.auth import OneLogin_Saml2_Auth from starlette.datastructures import FormData +if config("ENABLE_SSO", cast=bool, default=True): + from onelogin.saml2.auth import OneLogin_Saml2_Auth + SAML2 = { "strict": config("saml_strict", cast=bool, default=True), "debug": config("saml_debug", cast=bool, default=True), diff --git a/ee/api/chalicelib/utils/ch_client.py b/ee/api/chalicelib/utils/ch_client.py index 1c739deaa..c986fe581 100644 --- a/ee/api/chalicelib/utils/ch_client.py +++ b/ee/api/chalicelib/utils/ch_client.py @@ -18,9 +18,10 @@ if config('ch_receive_timeout', cast=int, default=-1) > 0: class ClickHouseClient: __client = None - def __init__(self): + def __init__(self, database=None): self.__client = clickhouse_driver.Client(host=config("ch_host"), - 
database=config("ch_database", default="default"), + database=database if database else config("ch_database", + default="default"), user=config("ch_user", default="default"), password=config("ch_password", default=""), port=config("ch_port", cast=int), diff --git a/ee/api/chalicelib/utils/events_queue.py b/ee/api/chalicelib/utils/events_queue.py index b715b072e..090c62f7d 100644 --- a/ee/api/chalicelib/utils/events_queue.py +++ b/ee/api/chalicelib/utils/events_queue.py @@ -25,7 +25,7 @@ class EventQueue(): project_id, user_id, element = self.events.get() params[f'project_id_{i}'] = project_id params[f'user_id_{i}'] = user_id - for _key, _val in element.dict().items(): + for _key, _val in element.model_dump().items(): if _key == 'data': params[f'{_key}_{i}'] = json.dumps(_val) if 'sessionId' in _val.keys(): @@ -77,7 +77,7 @@ async def terminate(): logging.info('> queue fulshed') # def __process_schema(trace): -# data = trace.dict() +# data = trace.model_dump() # data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len( # trace.parameters.keys()) > 0 else None # data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index 1fe39ad95..f8d350399 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -82,7 +82,6 @@ rm -rf ./db_changes.sql rm -rf ./Dockerfile_bundle rm -rf ./entrypoint.bundle.sh rm -rf ./chalicelib/core/heatmaps.py -rm -rf ./schemas.py rm -rf ./routers/subs/v1_api.py #exp rm -rf ./chalicelib/core/custom_metrics.py rm -rf ./chalicelib/core/performance_event.py @@ -91,3 +90,5 @@ rm -rf ./app_alerts.py rm -rf ./build_alerts.sh rm -rf ./run-dev.sh rm -rf ./run-alerts-dev.sh +rm -rf ./schemas/overrides.py +rm -rf ./schemas/schemas.py \ No newline at end of file diff --git a/ee/api/or_dependencies.py b/ee/api/or_dependencies.py index 2671948d8..3f10cbc2e 100644 --- a/ee/api/or_dependencies.py +++ 
b/ee/api/or_dependencies.py @@ -10,12 +10,12 @@ from starlette.exceptions import HTTPException from starlette.requests import Request from starlette.responses import Response, JSONResponse -import schemas_ee +import schemas from chalicelib.utils import helper from chalicelib.core import traces -async def OR_context(request: Request) -> schemas_ee.CurrentContext: +async def OR_context(request: Request) -> schemas.CurrentContext: if hasattr(request.state, "currentContext"): return request.state.currentContext else: @@ -55,11 +55,20 @@ class ORRoute(APIRoute): return custom_route_handler -def __check(security_scopes: SecurityScopes, context: schemas_ee.CurrentContext = Depends(OR_context)): +def __check(security_scopes: SecurityScopes, context: schemas.CurrentContext = Depends(OR_context)): + s_p = 0 for scope in security_scopes.scopes: + if isinstance(scope, schemas.ServicePermissions): + s_p += 1 + if context.service_account and not isinstance(scope, schemas.ServicePermissions) \ + or not context.service_account and not isinstance(scope, schemas.Permissions): + continue if scope not in context.permissions: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Not enough permissions") + if context.service_account and s_p == 0: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not enough permissions (service account)") def OR_scope(*scopes): diff --git a/ee/api/requirements-alerts.txt b/ee/api/requirements-alerts.txt index 934130912..38c658c2f 100644 --- a/ee/api/requirements-alerts.txt +++ b/ee/api/requirements-alerts.txt @@ -1,20 +1,20 @@ -requests==2.31.0 +# Keep this version to not have conflicts between requests and boto3 urllib3==1.26.16 -boto3==1.26.148 -pyjwt==2.7.0 -psycopg2-binary==2.9.6 -elasticsearch==8.8.0 -jira==3.5.1 +requests==2.31.0 +boto3==1.28.35 +pyjwt==2.8.0 +psycopg2-binary==2.9.7 +elasticsearch==8.9.0 +jira==3.5.2 -fastapi==0.97.0 -uvicorn[standard]==0.22.0 +fastapi==0.103.0 +uvicorn[standard]==0.23.2 
python-decouple==3.8 -pydantic[email]==1.10.8 -apscheduler==3.10.1 +pydantic[email]==2.3.0 +apscheduler==3.10.4 -clickhouse-driver==0.2.6 clickhouse-driver[lz4]==0.2.6 -python-multipart==0.0.5 -azure-storage-blob==12.16.0 \ No newline at end of file +python-multipart==0.0.6 +azure-storage-blob==12.17.0 \ No newline at end of file diff --git a/ee/api/requirements-crons.txt b/ee/api/requirements-crons.txt index d87d54e34..5a24452a2 100644 --- a/ee/api/requirements-crons.txt +++ b/ee/api/requirements-crons.txt @@ -1,18 +1,19 @@ -requests==2.31.0 +# Keep this version to not have conflicts between requests and boto3 urllib3==1.26.16 -boto3==1.26.148 -pyjwt==2.7.0 -psycopg2-binary==2.9.6 -elasticsearch==8.8.0 -jira==3.5.1 +requests==2.31.0 +boto3==1.28.35 +pyjwt==2.8.0 +psycopg2-binary==2.9.7 +elasticsearch==8.9.0 +jira==3.5.2 -fastapi==0.97.0 + +fastapi==0.103.0 python-decouple==3.8 -pydantic[email]==1.10.8 -apscheduler==3.10.1 +pydantic[email]==2.3.0 +apscheduler==3.10.4 -clickhouse-driver==0.2.5 -clickhouse-driver[lz4]==0.2.5 -redis==4.5.5 -azure-storage-blob==12.16.0 +clickhouse-driver[lz4]==0.2.6 +redis==5.0.0 +azure-storage-blob==12.17.0 diff --git a/ee/api/requirements.txt b/ee/api/requirements.txt index 0f2fa5e95..4b5ba3d5e 100644 --- a/ee/api/requirements.txt +++ b/ee/api/requirements.txt @@ -1,21 +1,21 @@ -requests==2.31.0 +# Keep this version to not have conflicts between requests and boto3 urllib3==1.26.16 -boto3==1.26.148 -pyjwt==2.7.0 -psycopg2-binary==2.9.6 -elasticsearch==8.8.0 -jira==3.5.1 +requests==2.31.0 +boto3==1.28.40 +pyjwt==2.8.0 +psycopg2-binary==2.9.7 +elasticsearch==8.9.0 +jira==3.5.2 -fastapi==0.97.0 -uvicorn[standard]==0.22.0 -gunicorn==20.1.0 +fastapi==0.103.1 +uvicorn[standard]==0.23.2 +gunicorn==21.2.0 python-decouple==3.8 -pydantic[email]==1.10.8 -apscheduler==3.10.1 +pydantic[email]==2.3.0 +apscheduler==3.10.4 -clickhouse-driver==0.2.6 clickhouse-driver[lz4]==0.2.6 # TODO: enable after xmlsec fix 
https://github.com/xmlsec/python-xmlsec/issues/252 #--no-binary is used to avoid libxml2 library version incompatibilities between xmlsec and lxml @@ -23,6 +23,6 @@ clickhouse-driver[lz4]==0.2.6 python3-saml==1.15.0 python-multipart==0.0.6 -redis==4.5.5 +redis==5.0.0 #confluent-kafka==2.1.0 -azure-storage-blob==12.16.0 +azure-storage-blob==12.17.0 diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index 70717821c..ac9c7336a 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -6,7 +6,6 @@ from fastapi import HTTPException, status from starlette.responses import RedirectResponse, FileResponse import schemas -import schemas_ee from chalicelib.core import sessions, assist, heatmaps, sessions_favorite, sessions_assignments, errors, errors_viewed, \ errors_favorite, sessions_notes, click_maps, sessions_replay, signup, feature_flags from chalicelib.core import sessions_viewed @@ -18,9 +17,11 @@ from chalicelib.utils import captcha from chalicelib.utils import helper from chalicelib.utils.TimeUTC import TimeUTC from or_dependencies import OR_context, OR_scope -from routers import saml from routers.base import get_routers -from schemas_ee import Permissions +from schemas import Permissions, ServicePermissions + +if config("ENABLE_SSO", cast=bool, default=True): + from routers import saml public_app, app, app_apikey = get_routers() @@ -49,7 +50,7 @@ def login_user(data: schemas.UserLoginSchema = Body(...)): detail="Invalid captcha." 
) - r = users.authenticate(data.email, data.password) + r = users.authenticate(data.email, data.password.get_secret_value()) if r is None: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -132,9 +133,9 @@ def edit_slack_integration(integrationId: int, data: schemas.EditCollaborationSc @app.post('/client/members', tags=["client"]) -def add_member(background_tasks: BackgroundTasks, data: schemas_ee.CreateMemberSchema = Body(...), +def add_member(background_tasks: BackgroundTasks, data: schemas.CreateMemberSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data.dict(), + return users.create_member(tenant_id=context.tenant_id, user_id=context.user_id, data=data, background_tasks=background_tasks) @@ -167,11 +168,12 @@ def change_password_by_invitation(data: schemas.EditPasswordByInvitationSchema = if user["expiredChange"]: return {"errors": ["expired change, please re-use the invitation link"]} - return users.set_password_invitation(new_password=data.password, user_id=user["userId"], tenant_id=user["tenantId"]) + return users.set_password_invitation(new_password=data.password.get_secret_value(), user_id=user["userId"], + tenant_id=user["tenantId"]) @app.put('/client/members/{memberId}', tags=["client"]) -def edit_member(memberId: int, data: schemas_ee.EditMemberSchema, +def edit_member(memberId: int, data: schemas.EditMemberSchema, context: schemas.CurrentContext = Depends(OR_context)): return users.edit_member(tenant_id=context.tenant_id, editor_id=context.user_id, changes=data, user_id_to_update=memberId) @@ -203,11 +205,13 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)): # for backward compatibility @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.session_replay, 
ServicePermissions.session_replay)]) def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_by_id2_pg(project_id=projectId, session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is None: @@ -220,12 +224,30 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba } +@app.post('/{projectId}/sessions/search', tags=["sessions"], + dependencies=[OR_scope(Permissions.session_replay)]) +def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id) + return {'data': data} + + +@app.post('/{projectId}/sessions/search/ids', tags=["sessions"], + dependencies=[OR_scope(Permissions.session_replay)]) +def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True) + return {'data': data} + + @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_replay(project_id=projectId, 
session_id=sessionId, full_data=True, include_fav_viewed=True, group_metadata=True, context=context) if data is None: @@ -239,11 +261,13 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) def get_session_events(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): - if isinstance(sessionId, str): + if not sessionId.isnumeric(): return {"errors": ["session not found"]} + else: + sessionId = int(sessionId) data = sessions_replay.get_events(project_id=projectId, session_id=sessionId) if data is None: return {"errors": ["session not found"]} @@ -265,18 +289,6 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, } -@app.post('/{projectId}/errors/search', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) -def errors_search(projectId: int, data: schemas.SearchErrorsSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": errors.search(data, projectId, user_id=context.user_id)} - - -@app.get('/{projectId}/errors/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) -def errors_stats(projectId: int, startTimestamp: int, endTimestamp: int, - context: schemas.CurrentContext = Depends(OR_context)): - return errors.stats(projectId, user_id=context.user_id, startTimestamp=startTimestamp, endTimestamp=endTimestamp) - - @app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): @@ -288,15 +300,6 @@ def errors_get_details(projectId: int, errorId: str, 
background_tasks: Backgroun return data -@app.get('/{projectId}/errors/{errorId}/stats', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) -def errors_get_details_right_column(projectId: int, errorId: str, startDate: int = TimeUTC.now(-7), - endDate: int = TimeUTC.now(), density: int = 7, - context: schemas.CurrentContext = Depends(OR_context)): - data = errors.get_details_chart(project_id=projectId, user_id=context.user_id, error_id=errorId, - **{"startDate": startDate, "endDate": endDate, "density": density}) - return data - - @app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) def errors_get_details_sourcemaps(projectId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): @@ -326,9 +329,10 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa return {"errors": ["undefined action"]} -@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], dependencies=[OR_scope(Permissions.assist_live)]) +@app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], + dependencies=[OR_scope(Permissions.assist_live, ServicePermissions.assist_live)]) def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId) if data is None: data = sessions_replay.get_replay(context=context, project_id=projectId, session_id=sessionId, @@ -342,13 +346,15 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, + ServicePermissions.assist_live, 
ServicePermissions.session_replay)]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]} - if isinstance(sessionId, str): - print(f"{sessionId} not a valid number.") + if not sessionId.isnumeric(): return not_found + else: + sessionId = int(sessionId) if not sessions.session_exists(project_id=projectId, session_id=sessionId): print(f"{projectId}/{sessionId} not found in DB.") if not assist.session_exists(project_id=projectId, session_id=sessionId): @@ -363,13 +369,16 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools)]) + dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools, + ServicePermissions.assist_live, ServicePermissions.session_replay, + ServicePermissions.dev_tools)]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]} - if isinstance(sessionId, str): - print(f"{sessionId} not a valid number.") + if not sessionId.isnumeric(): return not_found + else: + sessionId = int(sessionId) if not sessions.session_exists(project_id=projectId, session_id=sessionId): print(f"{projectId}/{sessionId} not found in DB.") if not assist.session_exists(project_id=projectId, session_id=sessionId): @@ -392,7 +401,7 @@ def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], dependencies=[OR_scope(Permissions.session_replay)]) def add_remove_favorite_session2(projectId: int, sessionId: int, - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: 
schemas.CurrentContext = Depends(OR_context)): return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId) @@ -511,7 +520,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), @app.post('/{projectId}/click_maps/search', tags=["click maps"], dependencies=[OR_scope(Permissions.session_replay)]) -def click_map_search(projectId: int, data: schemas.FlatClickMapSessionsSearch = Body(...), +def click_map_search(projectId: int, data: schemas.ClickMapSessionsSearch = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": click_maps.search_short_session(user_id=context.user_id, data=data, project_id=projectId)} @@ -547,7 +556,7 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"], dependencies=[OR_scope(Permissions.feature_flags)]) def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): - return feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id) + return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)} @app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"], diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index f3d0b25cf..863aede80 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -7,14 +7,14 @@ unlock.check() from or_dependencies import OR_context from routers.base import get_routers -import schemas_ee +import schemas from fastapi import Depends, Body public_app, app, app_apikey = get_routers() @app.get('/client/roles', tags=["client", "roles"]) -def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)): +def get_roles(context: schemas.CurrentContext = Depends(OR_context)): return { 'data': roles.get_roles(tenant_id=context.tenant_id) } @@ -22,8 +22,8 @@ def get_roles(context: 
schemas_ee.CurrentContext = Depends(OR_context)): @app.post('/client/roles', tags=["client", "roles"]) @app.put('/client/roles', tags=["client", "roles"]) -def add_role(data: schemas_ee.RolePayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def add_role(data: schemas.RolePayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) if "errors" in data: return data @@ -35,8 +35,8 @@ def add_role(data: schemas_ee.RolePayloadSchema = Body(...), @app.post('/client/roles/{roleId}', tags=["client", "roles"]) @app.put('/client/roles/{roleId}', tags=["client", "roles"]) -def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def edit_role(roleId: int, data: schemas.RolePayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, data=data) if "errors" in data: return data @@ -47,7 +47,7 @@ def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...), @app.delete('/client/roles/{roleId}', tags=["client", "roles"]) -def delete_role(roleId: int, _=Body(None), context: schemas_ee.CurrentContext = Depends(OR_context)): +def delete_role(roleId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId) if "errors" in data: return data @@ -63,48 +63,48 @@ def get_assist_credentials(): @app.post('/trails', tags=["traces", "trails"]) -def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def get_trails(data: schemas.TrailSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): return { 'data': 
traces.get_all(tenant_id=context.tenant_id, data=data) } @app.post('/trails/actions', tags=["traces", "trails"]) -def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)): +def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)): return {'data': traces.get_available_actions(tenant_id=context.tenant_id)} @app.put('/{projectId}/assist/save', tags=["assist"]) -def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordPayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def sign_record_for_upload(projectId: int, data: schemas.AssistRecordPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=data.session_id): return {"errors": ["Session not found"]} return {"data": assist_records.presign_record(project_id=projectId, data=data, context=context)} @app.put('/{projectId}/assist/save/done', tags=["assist"]) -def save_record_after_upload(projectId: int, data: schemas_ee.AssistRecordSavePayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def save_record_after_upload(projectId: int, data: schemas.AssistRecordSavePayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=data.session_id): return {"errors": ["Session not found"]} return {"data": {"URL": assist_records.save_record(project_id=projectId, data=data, context=context)}} @app.post('/{projectId}/assist/records', tags=["assist"]) -def search_records(projectId: int, data: schemas_ee.AssistRecordSearchPayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def search_records(projectId: int, data: schemas.AssistRecordSearchPayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): return {"data": 
assist_records.search_records(project_id=projectId, data=data, context=context)} @app.get('/{projectId}/assist/records/{recordId}', tags=["assist"]) -def get_record(projectId: int, recordId: int, context: schemas_ee.CurrentContext = Depends(OR_context)): +def get_record(projectId: int, recordId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": assist_records.get_record(project_id=projectId, record_id=recordId, context=context)} @app.post('/{projectId}/assist/records/{recordId}', tags=["assist"]) -def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUpdatePayloadSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def update_record(projectId: int, recordId: int, data: schemas.AssistRecordUpdatePayloadSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): result = assist_records.update_record(project_id=projectId, record_id=recordId, data=data, context=context) if "errors" in result: return result @@ -113,7 +113,7 @@ def update_record(projectId: int, recordId: int, data: schemas_ee.AssistRecordUp @app.delete('/{projectId}/assist/records/{recordId}', tags=["assist"]) def delete_record(projectId: int, recordId: int, _=Body(None), - context: schemas_ee.CurrentContext = Depends(OR_context)): + context: schemas.CurrentContext = Depends(OR_context)): result = assist_records.delete_record(project_id=projectId, record_id=recordId, context=context) if "errors" in result: return result @@ -121,8 +121,8 @@ def delete_record(projectId: int, recordId: int, _=Body(None), @app.post('/{projectId}/signals', tags=['signals']) -def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def send_interactions(projectId: int, data: schemas.SignalsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): data = signals.handle_frontend_signals_queued(project_id=projectId, 
user_id=context.user_id, data=data) if "errors" in data: @@ -132,6 +132,6 @@ def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...) @app.post('/{projectId}/dashboard/insights', tags=["insights"]) @app.post('/{projectId}/dashboard/insights', tags=["insights"]) -def sessions_search(projectId: int, data: schemas_ee.GetInsightsSchema = Body(...), - context: schemas_ee.CurrentContext = Depends(OR_context)): +def sessions_search(projectId: int, data: schemas.GetInsightsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): return {'data': sessions_insights.fetch_selected(data=data, project_id=projectId)} diff --git a/ee/api/routers/subs/insights.py b/ee/api/routers/subs/insights.py index 822e37185..970bcb35f 100644 --- a/ee/api/routers/subs/insights.py +++ b/ee/api/routers/subs/insights.py @@ -4,9 +4,8 @@ import schemas from chalicelib.core import product_analytics from or_dependencies import OR_scope from routers.base import get_routers -from schemas_ee import Permissions -public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) +public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)]) @app.post('/{projectId}/insights/journey', tags=["insights"]) @@ -14,7 +13,6 @@ public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema = Body(...)): return {"data": product_analytics.path_analysis(project_id=projectId, **data.dict())} - # @app.post('/{projectId}/insights/users_acquisition', tags=["insights"]) # @app.get('/{projectId}/insights/users_acquisition', tags=["insights"]) # async def get_users_acquisition(projectId: int, data: schemas.MetricPayloadSchema = Body(...)): @@ -81,7 +79,6 @@ async def get_insights_journey(projectId: int, data: schemas.PathAnalysisSchema # return {"data": product_analytics.users_slipping(project_id=projectId, **data.dict())} - # # # 
@app.route('/{projectId}/dashboard/{widget}/search', methods=['GET']) diff --git a/ee/api/routers/subs/metrics.py b/ee/api/routers/subs/metrics.py index 464bb5596..83f037d23 100644 --- a/ee/api/routers/subs/metrics.py +++ b/ee/api/routers/subs/metrics.py @@ -3,17 +3,15 @@ from typing import Union from fastapi import Body, Depends, Request import schemas -import schemas_ee from chalicelib.core import dashboards, custom_metrics, funnels from or_dependencies import OR_context, OR_scope from routers.base import get_routers -from schemas_ee import Permissions -public_app, app, app_apikey = get_routers([OR_scope(Permissions.metrics)]) +public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)]) @app.post('/{projectId}/dashboards', tags=["dashboard"]) -@app.put('/{projectId}/dashboards', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards', tags=["dashboard"]) def create_dashboards(projectId: int, data: schemas.CreateDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return dashboards.create_dashboard(project_id=projectId, user_id=context.user_id, data=data) @@ -32,7 +30,7 @@ def get_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont return {"data": data} -@app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) +# @app.post('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) @app.put('/{projectId}/dashboards/{dashboardId}', tags=["dashboard"]) def update_dashboard(projectId: int, dashboardId: int, data: schemas.EditDashboardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -52,8 +50,8 @@ def pin_dashboard(projectId: int, dashboardId: int, context: schemas.CurrentCont @app.post('/{projectId}/dashboards/{dashboardId}/cards', tags=["cards"]) -@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +# 
@app.post('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/widgets', tags=["dashboard"]) def add_card_to_dashboard(projectId: int, dashboardId: int, data: schemas.AddWidgetToDashboardPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -62,16 +60,16 @@ def add_card_to_dashboard(projectId: int, dashboardId: int, @app.post('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/metrics', tags=["dashboard"]) def create_metric_and_add_to_dashboard(projectId: int, dashboardId: int, - data: schemas_ee.CardSchema = Body(...), + data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": dashboards.create_metric_add_widget(project_id=projectId, user_id=context.user_id, dashboard_id=dashboardId, data=data)} @app.post('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) -@app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) +# @app.put('/{projectId}/dashboards/{dashboardId}/widgets/{widgetId}', tags=["dashboard"]) def update_widget_in_dashboard(projectId: int, dashboardId: int, widgetId: int, data: schemas.UpdateWidgetPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -98,18 +96,18 @@ def remove_widget_from_dashboard(projectId: int, dashboardId: int, widgetId: int @app.post('/{projectId}/cards/try', tags=["cards"]) -@app.post('/{projectId}/metrics/try', tags=["dashboard"]) -@app.put('/{projectId}/metrics/try', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) -def try_card(projectId: int, data: schemas_ee.CardSchema = Body(...), +# @app.post('/{projectId}/metrics/try', 
tags=["dashboard"]) +# @app.put('/{projectId}/metrics/try', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/try', tags=["customMetrics"]) +def try_card(projectId: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.merged_live(project_id=projectId, data=data, user_id=context.user_id)} @app.post('/{projectId}/cards/try/sessions', tags=["cards"]) -@app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/try/sessions', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try/sessions', tags=["customMetrics"]) def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.try_sessions(project_id=projectId, user_id=context.user_id, data=data) @@ -117,48 +115,50 @@ def try_card_sessions(projectId: int, data: schemas.CardSessionsSchema = Body(.. 
@app.post('/{projectId}/cards/try/issues', tags=["cards"]) -@app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/try/issues', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/try/issues', tags=["customMetrics"]) def try_card_funnel_issues(projectId: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if len(data.series) == 0: return {"data": []} - data.series[0].filter.startDate = data.startTimestamp - data.series[0].filter.endDate = data.endTimestamp + data.series[0].filter.startTimestamp = data.startTimestamp + data.series[0].filter.endTimestamp = data.endTimestamp data = funnels.get_issues_on_the_fly_widget(project_id=projectId, data=data.series[0].filter) return {"data": data} @app.get('/{projectId}/cards', tags=["cards"]) -@app.get('/{projectId}/metrics', tags=["dashboard"]) -@app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) +# @app.get('/{projectId}/metrics', tags=["dashboard"]) +# @app.get('/{projectId}/custom_metrics', tags=["customMetrics"]) def get_cards(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.get_all(project_id=projectId, user_id=context.user_id)} @app.post('/{projectId}/cards', tags=["cards"]) -@app.post('/{projectId}/metrics', tags=["dashboard"]) -@app.put('/{projectId}/metrics', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) -def create_card(projectId: int, data: schemas_ee.CardSchema = Body(...), +# @app.post('/{projectId}/metrics', tags=["dashboard"]) +# @app.put('/{projectId}/metrics', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics', tags=["customMetrics"]) +def create_card(projectId: int, data: 
schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return custom_metrics.create(project_id=projectId, user_id=context.user_id, data=data) + return custom_metrics.create_card(project_id=projectId, user_id=context.user_id, data=data) @app.post('/{projectId}/cards/search', tags=["cards"]) -@app.post('/{projectId}/metrics/search', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/search', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/search', tags=["customMetrics"]) def search_cards(projectId: int, data: schemas.SearchCardsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": custom_metrics.search_all(project_id=projectId, user_id=context.user_id, data=data)} @app.get('/{projectId}/cards/{metric_id}', tags=["cards"]) -@app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.get('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.get('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) def get_card(projectId: int, metric_id: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): - if not isinstance(metric_id, int): + if metric_id.isnumeric(): + metric_id = int(metric_id) + else: return {"errors": ["invalid card_id"]} data = custom_metrics.get_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id) if data is None: @@ -175,8 +175,8 @@ def get_card(projectId: int, metric_id: Union[int, str], context: schemas.Curren @app.post('/{projectId}/cards/{metric_id}/sessions', tags=["cards"]) -@app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/sessions', tags=["dashboard"]) +# 
@app.post('/{projectId}/custom_metrics/{metric_id}/sessions', tags=["customMetrics"]) def get_card_sessions(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -187,13 +187,15 @@ def get_card_sessions(projectId: int, metric_id: int, @app.post('/{projectId}/cards/{metric_id}/issues', tags=["cards"]) -@app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/issues', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/issues', tags=["customMetrics"]) def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - if not isinstance(metric_id, int): - return {"errors": [f"invalid card_id: {metric_id}"]} + if metric_id.isnumeric(): + metric_id = int(metric_id) + else: + return {"errors": ["invalid card_id"]} data = custom_metrics.get_funnel_issues(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) @@ -203,8 +205,8 @@ def get_card_funnel_issues(projectId: int, metric_id: Union[int, str], @app.post('/{projectId}/cards/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/issues/{issueId}/sessions', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/issues/{issueId}/sessions', tags=["customMetrics"]) def get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: str, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): @@ -216,22 +218,22 @@ def 
get_metric_funnel_issue_sessions(projectId: int, metric_id: int, issueId: st @app.post('/{projectId}/cards/{metric_id}/errors', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/errors', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/errors', tags=["customMetrics"]) def get_custom_metric_errors_list(projectId: int, metric_id: int, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, metric_id=metric_id, - data=data) + data = custom_metrics.get_errors_list(project_id=projectId, user_id=context.user_id, + metric_id=metric_id, data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} @app.post('/{projectId}/cards/{metric_id}/chart', tags=["card"]) -@app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) -def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardChartSchema = Body(...), +# @app.post('/{projectId}/metrics/{metric_id}/chart', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/chart', tags=["customMetrics"]) +def get_card_chart(projectId: int, metric_id: int, request: Request, data: schemas.CardSessionsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = custom_metrics.make_chart_from_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) @@ -239,25 +241,25 @@ def get_card_chart(projectId: int, metric_id: int, request: Request, data: schem @app.post('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) 
-@app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) -def update_custom_metric(projectId: int, metric_id: int, data: schemas_ee.UpdateCardSchema = Body(...), +# @app.post('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.put('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +def update_custom_metric(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - data = custom_metrics.update(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) + data = custom_metrics.update_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id, data=data) if data is None: return {"errors": ["custom metric not found"]} return {"data": data} @app.post('/{projectId}/cards/{metric_id}/status', tags=["dashboard"]) -@app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) -@app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) -@app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) -@app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +# @app.post('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +# @app.put('/{projectId}/metrics/{metric_id}/status', tags=["dashboard"]) +# @app.post('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) +# @app.put('/{projectId}/custom_metrics/{metric_id}/status', tags=["customMetrics"]) def update_custom_metric_state(projectId: int, metric_id: int, - data: schemas.UpdateCustomMetricsStatusSchema = Body(...), + data: schemas.UpdateCardStatusSchema = Body(...), context: 
schemas.CurrentContext = Depends(OR_context)): return { "data": custom_metrics.change_state(project_id=projectId, user_id=context.user_id, metric_id=metric_id, @@ -265,8 +267,8 @@ def update_custom_metric_state(projectId: int, metric_id: int, @app.delete('/{projectId}/cards/{metric_id}', tags=["dashboard"]) -@app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) -@app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) +# @app.delete('/{projectId}/metrics/{metric_id}', tags=["dashboard"]) +# @app.delete('/{projectId}/custom_metrics/{metric_id}', tags=["customMetrics"]) def delete_custom_metric(projectId: int, metric_id: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": custom_metrics.delete(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} + return {"data": custom_metrics.delete_card(project_id=projectId, user_id=context.user_id, metric_id=metric_id)} diff --git a/ee/api/schemas/__init__.py b/ee/api/schemas/__init__.py new file mode 100644 index 000000000..46626a1ec --- /dev/null +++ b/ee/api/schemas/__init__.py @@ -0,0 +1,3 @@ +from .schemas import * +from .schemas_ee import * +from . import overrides as _overrides diff --git a/ee/api/schemas_ee.py b/ee/api/schemas/schemas_ee.py similarity index 60% rename from ee/api/schemas_ee.py rename to ee/api/schemas/schemas_ee.py index 96f6c8aae..61d872535 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas/schemas_ee.py @@ -1,11 +1,12 @@ -from enum import Enum from typing import Optional, List, Union, Literal -from pydantic import BaseModel, Field, EmailStr -from pydantic import root_validator, validator +from pydantic import Field, EmailStr, field_validator, model_validator -import schemas +from . 
import schemas from chalicelib.utils.TimeUTC import TimeUTC +from .overrides import BaseModel, Enum +from .overrides import transform_email, remove_whitespace, remove_duplicate_values, \ + single_to_list, ORUnion class Permissions(str, Enum): @@ -18,8 +19,16 @@ class Permissions(str, Enum): feature_flags = "FEATURE_FLAGS" +class ServicePermissions(str, Enum): + session_replay = "SERVICE_SESSION_REPLAY" + dev_tools = "SERVICE_DEV_TOOLS" + assist_live = "SERVICE_ASSIST_LIVE" + assist_call = "SERVICE_ASSIST_CALL" + + class CurrentContext(schemas.CurrentContext): - permissions: List[Optional[Permissions]] = Field(...) + permissions: List[Union[Permissions, ServicePermissions]] = Field(...) + service_account: bool = Field(default=False) class RolePayloadSchema(BaseModel): @@ -28,10 +37,7 @@ class RolePayloadSchema(BaseModel): permissions: List[Permissions] = Field(...) all_projects: bool = Field(default=True) projects: List[int] = Field(default=[]) - _transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace) - - class Config: - alias_generator = schemas.attribute_to_camel_case + _transform_name = field_validator('name', mode="before")(remove_whitespace) class SignalsSchema(BaseModel): @@ -55,9 +61,6 @@ class GetInsightsSchema(schemas._TimedSchema): metricValue: List[InsightCategories] = Field(default=[]) series: List[schemas.CardSeriesSchema] = Field(default=[]) - class Config: - alias_generator = schemas.attribute_to_camel_case - class CreateMemberSchema(schemas.CreateMemberSchema): roleId: Optional[int] = Field(None) @@ -79,7 +82,7 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): action: Optional[str] = Field(default=None) order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc) - @root_validator(pre=True) + @model_validator(mode="before") def transform_order(cls, values): if values.get("order") is None: values["order"] = schemas.SortOrderType.desc @@ -87,9 +90,6 @@ class 
TrailSearchPayloadSchema(schemas._PaginatedSchema): values["order"] = values["order"].upper() return values - class Config: - alias_generator = schemas.attribute_to_camel_case - class SessionModel(BaseModel): viewed: bool = Field(default=False) @@ -119,16 +119,13 @@ class SessionModel(BaseModel): class AssistRecordUpdatePayloadSchema(BaseModel): name: str = Field(..., min_length=1) - _transform_name = validator('name', pre=True, allow_reuse=True)(schemas.remove_whitespace) + _transform_name = field_validator('name', mode="before")(remove_whitespace) class AssistRecordPayloadSchema(AssistRecordUpdatePayloadSchema): duration: int = Field(...) session_id: int = Field(...) - class Config: - alias_generator = schemas.attribute_to_camel_case - class AssistRecordSavePayloadSchema(AssistRecordPayloadSchema): key: str = Field(...) @@ -139,41 +136,14 @@ class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSc query: Optional[str] = Field(default=None) order: Literal["asc", "desc"] = Field(default="desc") - class Config: - alias_generator = schemas.attribute_to_camel_case - # TODO: move these to schema when Insights is supported on PG -class MetricOfInsights(str, Enum): - issue_categories = "issueCategories" +class CardInsights(schemas.CardInsights): + metric_value: List[InsightCategories] = Field(default=[]) - -class CardSchema(schemas.CardSchema): - metric_of: Union[schemas.MetricOfTimeseries, schemas.MetricOfTable, \ - schemas.MetricOfErrors, schemas.MetricOfPerformance, \ - schemas.MetricOfResources, schemas.MetricOfWebVitals, \ - schemas.MetricOfClickMap, MetricOfInsights] = Field(default=schemas.MetricOfTable.user_id) - metric_value: List[Union[schemas.IssueType, InsightCategories]] = Field(default=[]) - - @root_validator + @model_validator(mode='after') def restrictions(cls, values): return values - @root_validator - def validator(cls, values): - values = super().validator(values) - if values.get("metric_type") == schemas.MetricType.insights: - 
assert values.get("view_type") == schemas.MetricOtherViewType.list_chart, \ - f"viewType must be 'list' for metricOf:{values.get('metric_of')}" - assert isinstance(values.get("metric_of"), MetricOfInsights), \ - f"metricOf must be of type {MetricOfInsights} for metricType:{schemas.MetricType.insights}" - if values.get("metric_value") is not None and len(values.get("metric_value")) > 0: - for i in values.get("metric_value"): - assert isinstance(i, InsightCategories), \ - f"metricValue should be of type [InsightCategories] for metricType:{schemas.MetricType.insights}" - return values - - -class UpdateCardSchema(CardSchema): - series: List[schemas.CardUpdateSeriesSchema] = Field(...) +CardSchema = ORUnion(Union[schemas.__cards_union_base, CardInsights], discriminator='metric_type') diff --git a/ee/assist/package.json b/ee/assist/package.json index 9e1ca0331..b707412b4 100644 --- a/ee/assist/package.json +++ b/ee/assist/package.json @@ -18,13 +18,13 @@ }, "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { - "@maxmind/geoip2-node": "^3.5.0", - "@socket.io/redis-adapter": "^8.1.0", + "@maxmind/geoip2-node": "^4.2.0", + "@socket.io/redis-adapter": "^8.2.1", "express": "^4.18.2", - "jsonwebtoken": "^9.0.0", - "redis": "^4.6.6", - "socket.io": "^4.6.1", + "jsonwebtoken": "^9.0.1", + "redis": "^4.6.7", + "socket.io": "^4.7.2", "ua-parser-js": "^1.0.35", - "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.23.0" + "uWebSockets.js": "github:uNetworking/uWebSockets.js#v20.31.0" } } diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/1.15.0/1.15.0.sql b/ee/scripts/schema/db/init_dbs/clickhouse/1.15.0/1.15.0.sql new file mode 100644 index 000000000..d8efc9c6d --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/clickhouse/1.15.0/1.15.0.sql @@ -0,0 +1,7 @@ +CREATE OR REPLACE FUNCTION openreplay_version AS() -> 'v1.15.0-ee'; + +ALTER TABLE experimental.events + ADD COLUMN IF NOT EXISTS transfer_size Nullable(UInt32); + +ALTER TABLE 
experimental.sessions + ADD COLUMN IF NOT EXISTS timezone LowCardinality(Nullable(String)); \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql index 8048ddce5..87ed021bb 100644 --- a/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/clickhouse/create/init_schema.sql @@ -79,6 +79,7 @@ CREATE TABLE IF NOT EXISTS experimental.events issue_id Nullable(String), error_tags_keys Array(String), error_tags_values Array(Nullable(String)), + transfer_size Nullable(UInt32), message_id UInt64 DEFAULT 0, _timestamp DateTime DEFAULT now() ) ENGINE = ReplacingMergeTree(_timestamp) @@ -131,6 +132,7 @@ CREATE TABLE IF NOT EXISTS experimental.sessions user_state LowCardinality(String), platform Enum8('web'=1,'ios'=2,'android'=3) DEFAULT 'web', datetime DateTime, + timezone LowCardinality(Nullable(String)), duration UInt32, pages_count UInt16, events_count UInt16, @@ -273,6 +275,7 @@ SELECT session_id, issue_id, error_tags_keys, error_tags_values, + transfer_size, message_id, _timestamp FROM experimental.events diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.14.0/1.14.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.14.0/1.14.0.sql index 76f42aa88..012aa445b 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.14.0/1.14.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.14.0/1.14.0.sql @@ -71,6 +71,14 @@ UPDATE public.roles SET permissions = (SELECT array_agg(distinct e) FROM unnest(permissions || '{FEATURE_FLAGS}') AS e) where not permissions @> '{FEATURE_FLAGS}'; +ALTER TYPE public.user_role ADD VALUE IF NOT EXISTS 'service'; + +ALTER TABLE IF EXISTS public.users + ADD COLUMN IF NOT EXISTS service_account bool NOT NULL DEFAULT FALSE; + +ALTER TABLE IF EXISTS public.roles + ADD COLUMN IF NOT EXISTS service_role bool NOT NULL DEFAULT FALSE; + COMMIT; \elif :is_next diff --git 
a/ee/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql new file mode 100644 index 000000000..4584237ec --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql @@ -0,0 +1,36 @@ +\set previous_version 'v1.14.0-ee' +\set next_version 'v1.15.0-ee' +SELECT openreplay_version() AS current_version, + openreplay_version() = :'previous_version' AS valid_previous, + openreplay_version() = :'next_version' AS is_next +\gset + +\if :valid_previous +\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version' +BEGIN; +SELECT format($fn_def$ +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT '%1$s' +$$ LANGUAGE sql IMMUTABLE; +$fn_def$, :'next_version') +\gexec + +-- +ALTER TABLE IF EXISTS events_common.requests + ADD COLUMN transfer_size bigint NULL; + +ALTER TABLE IF EXISTS public.sessions + ADD COLUMN IF NOT EXISTS timezone text NULL; + +ALTER TABLE IF EXISTS public.projects + ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web'; + +COMMIT; + +\elif :is_next +\echo new version detected :'next_version', nothing to do +\else +\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version' +\endif \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index dffbd81f4..7ab7f0cce 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -8,7 +8,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.14.0-ee' +SELECT 'v1.15.0-ee' $$ LANGUAGE sql IMMUTABLE; @@ -107,6 +107,7 @@ $$ ('dashboards'), ('dashboard_widgets'), ('errors'), + ('errors_tags'), ('integrations'), ('issues'), ('jira_cloud'), @@ -172,32 +173,34 @@ $$ 
protected bool NOT NULL DEFAULT FALSE, all_projects bool NOT NULL DEFAULT TRUE, created_at timestamp NOT NULL DEFAULT timezone('utc'::text, now()), - deleted_at timestamp NULL DEFAULT NULL + deleted_at timestamp NULL DEFAULT NULL, + service_role bool NOT NULL DEFAULT FALSE ); IF NOT EXISTS(SELECT * FROM pg_type typ WHERE typ.typname = 'user_role') THEN - CREATE TYPE user_role AS ENUM ('owner','admin','member'); + CREATE TYPE user_role AS ENUM ('owner','admin','member','service'); END IF; CREATE TABLE IF NOT EXISTS users ( - user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, - email text NOT NULL UNIQUE, - role user_role NOT NULL DEFAULT 'member', - name text NOT NULL, - created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), - deleted_at timestamp without time zone NULL DEFAULT NULL, - api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL, - jwt_iat timestamp without time zone NULL DEFAULT NULL, - data jsonb NOT NULL DEFAULT'{}'::jsonb, - weekly_report boolean NOT NULL DEFAULT TRUE, - origin text NULL DEFAULT NULL, - role_id integer REFERENCES roles (role_id) ON DELETE SET NULL, - internal_id text NULL DEFAULT NULL + user_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, + email text NOT NULL UNIQUE, + role user_role NOT NULL DEFAULT 'member', + name text NOT NULL, + created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + api_key text UNIQUE DEFAULT generate_api_key(20) NOT NULL, + jwt_iat timestamp without time zone NULL DEFAULT NULL, + data jsonb NOT NULL DEFAULT'{}'::jsonb, + weekly_report boolean NOT NULL DEFAULT TRUE, + origin text NULL DEFAULT NULL, + role_id integer REFERENCES roles (role_id) ON DELETE SET NULL, + internal_id text NULL DEFAULT NULL, + 
service_account bool NOT NULL DEFAULT FALSE ); CREATE INDEX IF NOT EXISTS users_tenant_id_deleted_at_N_idx ON users (tenant_id) WHERE deleted_at ISNULL; CREATE INDEX IF NOT EXISTS users_name_gin_idx ON users USING GIN (name gin_trgm_ops); @@ -230,12 +233,19 @@ $$ ); CREATE UNIQUE INDEX IF NOT EXISTS oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider); + IF NOT EXISTS(SELECT * + FROM pg_type typ + WHERE typ.typname = 'platform') THEN + CREATE TYPE platform AS ENUM ('web','ios','android'); + END IF; + CREATE TABLE IF NOT EXISTS projects ( project_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, project_key varchar(20) NOT NULL UNIQUE DEFAULT generate_api_key(20), tenant_id integer NOT NULL REFERENCES tenants (tenant_id) ON DELETE CASCADE, name text NOT NULL, + platform platform NOT NULL DEFAULT 'web', active boolean NOT NULL, sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), @@ -487,11 +497,20 @@ $$ CREATE INDEX IF NOT EXISTS user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); CREATE INDEX IF NOT EXISTS user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); - IF NOT EXISTS(SELECT * - FROM pg_type typ - WHERE typ.typname = 'platform') THEN - CREATE TYPE platform AS ENUM ('web','ios','android'); - END IF; + CREATE TABLE IF NOT EXISTS errors_tags + ( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE + ); + + CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); + CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON 
errors_tags (session_id); + CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); IF NOT EXISTS(SELECT * FROM pg_type typ @@ -512,6 +531,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, tracker_version text NOT NULL, start_ts bigint NOT NULL, + timezone text NULL, duration integer NULL, rev_id text DEFAULT NULL, platform platform NOT NULL DEFAULT 'web', @@ -1086,21 +1106,6 @@ $$ CREATE INDEX IF NOT EXISTS errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); CREATE INDEX IF NOT EXISTS errors_error_id_idx ON events.errors (error_id); - CREATE TABLE IF NOT EXISTS errors_tags - ( - key text NOT NULL, - value text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - session_id bigint NOT NULL, - message_id bigint NOT NULL, - FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE - ); - - CREATE INDEX IF NOT EXISTS errors_tags_error_id_idx ON errors_tags (error_id); - CREATE INDEX IF NOT EXISTS errors_tags_session_id_idx ON errors_tags (session_id); - CREATE INDEX IF NOT EXISTS errors_tags_message_id_idx ON errors_tags (message_id); - IF NOT EXISTS(SELECT * FROM pg_type typ WHERE typ.typname = 'http_method') THEN @@ -1280,6 +1285,7 @@ $$ host text NULL, path text NULL, query text NULL, + transfer_size bigint NULL, PRIMARY KEY (session_id, timestamp, seq_index) ); diff --git a/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql b/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql new file mode 100644 index 000000000..83284abb2 --- /dev/null +++ b/scripts/schema/db/init_dbs/postgresql/1.15.0/1.15.0.sql @@ -0,0 +1,36 @@ +\set previous_version 'v1.14.0' +\set next_version 'v1.15.0' +SELECT openreplay_version() AS current_version, + openreplay_version() = :'previous_version' AS valid_previous, + 
openreplay_version() = :'next_version' AS is_next +\gset + +\if :valid_previous +\echo valid previous DB version :'previous_version', starting DB upgrade to :'next_version' +BEGIN; +SELECT format($fn_def$ +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT '%1$s' +$$ LANGUAGE sql IMMUTABLE; +$fn_def$, :'next_version') +\gexec + +-- +ALTER TABLE IF EXISTS events_common.requests + ADD COLUMN IF NOT EXISTS transfer_size bigint NULL; + +ALTER TABLE IF EXISTS public.sessions + ADD COLUMN IF NOT EXISTS timezone text NULL; + +ALTER TABLE IF EXISTS public.projects + ADD COLUMN IF NOT EXISTS platform public.platform NOT NULL DEFAULT 'web'; + +COMMIT; + +\elif :is_next +\echo new version detected :'next_version', nothing to do +\else +\warn skipping DB upgrade of :'next_version', expected previous version :'previous_version', found :'current_version' +\endif \ No newline at end of file diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 234826973..91b76a986 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -8,7 +8,7 @@ CREATE EXTENSION IF NOT EXISTS pgcrypto; CREATE OR REPLACE FUNCTION openreplay_version() RETURNS text AS $$ -SELECT 'v1.14.0' +SELECT 'v1.15.0' $$ LANGUAGE sql IMMUTABLE; @@ -163,6 +163,7 @@ $$ ); CREATE UNIQUE INDEX oauth_authentication_unique_user_id_provider_idx ON oauth_authentication (user_id, provider); + CREATE TYPE platform AS ENUM ('web','ios','android'); CREATE TABLE projects ( @@ -171,6 +172,7 @@ $$ name text NOT NULL, active boolean NOT NULL, sample_rate smallint NOT NULL DEFAULT 100 CHECK (sample_rate >= 0 AND sample_rate <= 100), + platform platform NOT NULL DEFAULT 'web', created_at timestamp without time zone NOT NULL DEFAULT (now() at time zone 'utc'), deleted_at timestamp without time zone NULL DEFAULT NULL, max_session_duration integer NOT NULL DEFAULT 7200000, @@ -375,10 
+377,23 @@ $$ CREATE INDEX user_viewed_errors_user_id_idx ON public.user_viewed_errors (user_id); CREATE INDEX user_viewed_errors_error_id_idx ON public.user_viewed_errors (error_id); + CREATE TABLE errors_tags + ( + key text NOT NULL, + value text NOT NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, + session_id bigint NOT NULL, + message_id bigint NOT NULL, + FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE + ); + + CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id); + CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id); + CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id); CREATE TYPE device_type AS ENUM ('desktop', 'tablet', 'mobile', 'other'); CREATE TYPE country AS ENUM ('UN', 'RW', 'SO', 'YE', 'IQ', 'SA', 'IR', 'CY', 'TZ', 'SY', 'AM', 'KE', 'CD', 'DJ', 'UG', 'CF', 'SC', 'JO', 'LB', 'KW', 'OM', 'QA', 'BH', 'AE', 'IL', 'TR', 'ET', 'ER', 'EG', 'SD', 'GR', 'BI', 'EE', 'LV', 'AZ', 'LT', 'SJ', 'GE', 'MD', 'BY', 'FI', 'AX', 'UA', 'MK', 'HU', 'BG', 'AL', 'PL', 'RO', 'XK', 'ZW', 'ZM', 'KM', 'MW', 'LS', 'BW', 'MU', 'SZ', 'RE', 'ZA', 'YT', 'MZ', 'MG', 'AF', 'PK', 'BD', 'TM', 'TJ', 'LK', 'BT', 'IN', 'MV', 'IO', 'NP', 'MM', 'UZ', 'KZ', 'KG', 'TF', 'HM', 'CC', 'PW', 'VN', 'TH', 'ID', 'LA', 'TW', 'PH', 'MY', 'CN', 'HK', 'BN', 'MO', 'KH', 'KR', 'JP', 'KP', 'SG', 'CK', 'TL', 'RU', 'MN', 'AU', 'CX', 'MH', 'FM', 'PG', 'SB', 'TV', 'NR', 'VU', 'NC', 'NF', 'NZ', 'FJ', 'LY', 'CM', 'SN', 'CG', 'PT', 'LR', 'CI', 'GH', 'GQ', 'NG', 'BF', 'TG', 'GW', 'MR', 'BJ', 'GA', 'SL', 'ST', 'GI', 'GM', 'GN', 'TD', 'NE', 'ML', 'EH', 'TN', 'ES', 'MA', 'MT', 'DZ', 'FO', 'DK', 'IS', 'GB', 'CH', 'SE', 'NL', 'AT', 'BE', 'DE', 'LU', 'IE', 'MC', 'FR', 'AD', 'LI', 'JE', 'IM', 'GG', 'SK', 'CZ', 'NO', 'VA', 'SM', 'IT', 'SI', 'ME', 'HR', 'BA', 'AO', 'NA', 'SH', 'BV', 'BB', 'CV', 'GY', 'GF', 
'SR', 'PM', 'GL', 'PY', 'UY', 'BR', 'FK', 'GS', 'JM', 'DO', 'CU', 'MQ', 'BS', 'BM', 'AI', 'TT', 'KN', 'DM', 'AG', 'LC', 'TC', 'AW', 'VG', 'VC', 'MS', 'MF', 'BL', 'GP', 'GD', 'KY', 'BZ', 'SV', 'GT', 'HN', 'NI', 'CR', 'VE', 'EC', 'CO', 'PA', 'HT', 'AR', 'CL', 'BO', 'PE', 'MX', 'PF', 'PN', 'KI', 'TK', 'TO', 'WF', 'WS', 'NU', 'MP', 'GU', 'PR', 'VI', 'UM', 'AS', 'CA', 'US', 'PS', 'RS', 'AQ', 'SX', 'CW', 'BQ', 'SS','AC','AN','BU','CP','CS','CT','DD','DG','DY','EA','FQ','FX','HV','IC','JT','MI','NH','NQ','NT','PC','PU','PZ','RH','SU','TA','TP','VD','WK','YD','YU','ZR'); - CREATE TYPE platform AS ENUM ('web','ios','android'); CREATE TABLE sessions ( @@ -386,6 +401,7 @@ $$ project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, tracker_version text NOT NULL, start_ts bigint NOT NULL, + timezone text NULL, duration integer NULL, rev_id text DEFAULT NULL, platform platform NOT NULL DEFAULT 'web', @@ -560,6 +576,7 @@ $$ host text NULL, path text NULL, query text NULL, + transfer_size bigint NULL, PRIMARY KEY (session_id, timestamp, seq_index) ); @@ -687,20 +704,6 @@ $$ CREATE INDEX errors_error_id_timestamp_session_id_idx ON events.errors (error_id, timestamp, session_id); CREATE INDEX errors_error_id_idx ON events.errors (error_id); - CREATE TABLE errors_tags - ( - key text NOT NULL, - value text NOT NULL, - created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), - error_id text NOT NULL REFERENCES errors (error_id) ON DELETE CASCADE, - session_id bigint NOT NULL, - message_id bigint NOT NULL, - FOREIGN KEY (session_id, message_id) REFERENCES events.errors (session_id, message_id) ON DELETE CASCADE - ); - - CREATE INDEX errors_tags_error_id_idx ON errors_tags (error_id); - CREATE INDEX errors_tags_session_id_idx ON errors_tags (session_id); - CREATE INDEX errors_tags_message_id_idx ON errors_tags (message_id); CREATE TABLE events.graphql ( diff --git a/sourcemap-reader/package.json b/sourcemap-reader/package.json index 
fbbd096c8..93c756a8a 100644 --- a/sourcemap-reader/package.json +++ b/sourcemap-reader/package.json @@ -18,8 +18,8 @@ }, "homepage": "https://github.com/openreplay/openreplay#readme", "dependencies": { - "@azure/storage-blob": "^12.14.0", - "aws-sdk": "^2.1385.0", + "@azure/storage-blob": "^12.15.0", + "aws-sdk": "^2.1440.0", "express": "^4.18.2", "source-map": "^0.7.4" }