From 42575f9e5b9d9da8a7975b9065db1b6fa15de8a7 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 15:10:40 +0100 Subject: [PATCH 01/19] feat(chalice): fixed order-enum-value --- api/chalicelib/core/sessions.py | 18 +++++++++--------- ee/api/chalicelib/core/sessions.py | 12 ++++++------ ee/api/chalicelib/core/sessions_exp.py | 10 +++++----- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 62abd27ff..9c96d45c6 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -151,7 +151,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -168,8 +168,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -186,7 +186,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None 
and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -195,16 +195,16 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order.value}, issue_score DESC) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + ORDER BY {sort} {data.order.value}, issue_score DESC) AS full_sessions;""", full_args) - print("--------------------") - print(main_query) - print("--------------------") + # print("--------------------") + # print(main_query) + # print("--------------------") try: cur.execute(main_query) except Exception as err: diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index eadabe400..37106121a 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -153,7 +153,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -170,8 +170,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, 
MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -188,7 +188,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -197,12 +197,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order.value}, issue_score DESC) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", + ORDER BY {sort} {data.order.value}, issue_score DESC) AS full_sessions;""", full_args) # print("--------------------") # print(main_query) diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index cbd1c8252..2cb12188d 100644 --- 
a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -237,7 +237,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -254,8 +254,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -266,7 +266,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -285,7 +285,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ AND _timestamp >= toDateTime(%(startDate)s / 1000)) AS viewed_sessions ON (viewed_sessions.session_id = s.session_id) ) AS raw - ORDER BY sort_key {data.order} + ORDER BY sort_key {data.order.value} LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""", full_args) # 
print("--------------------") From 4edbdc11836ce94c75dd0ae458166e2e77fd0499 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 15:24:04 +0100 Subject: [PATCH 02/19] feat(chalice): fixed order-enum-value --- api/chalicelib/core/click_maps.py | 2 +- api/chalicelib/core/custom_metrics.py | 2 +- api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/assist_records.py | 2 +- ee/api/chalicelib/core/custom_metrics.py | 2 +- ee/api/chalicelib/core/sessions_notes.py | 2 +- ee/api/chalicelib/core/traces.py | 4 ++-- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/api/chalicelib/core/click_maps.py b/api/chalicelib/core/click_maps.py index d62417e8f..d3a2b912b 100644 --- a/api/chalicelib/core/click_maps.py +++ b/api/chalicelib/core/click_maps.py @@ -51,7 +51,7 @@ def search_short_session(data: schemas.FlatClickMapSessionsSearch, project_id, u main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} - ORDER BY {data.sort} {data.order} + ORDER BY {data.sort} {data.order.value} LIMIT 1;""", full_args) # print("--------------------") # print(main_query) diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py index 2ff14004f..060227427 100644 --- a/api/chalicelib/core/custom_metrics.py +++ b/api/chalicelib/core/custom_metrics.py @@ -396,7 +396,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser AND users.user_id = metrics.user_id ) AS owner ON (TRUE) WHERE {" AND ".join(constraints)} - ORDER BY created_at {data.order} + ORDER BY created_at {data.order.value} LIMIT %(limit)s OFFSET %(offset)s;""", params) cur.execute(query) rows = cur.fetchall() diff --git a/api/chalicelib/core/sessions_notes.py b/api/chalicelib/core/sessions_notes.py index cc27b1931..d0f54bd96 100644 --- a/api/chalicelib/core/sessions_notes.py +++ b/api/chalicelib/core/sessions_notes.py @@ 
-69,7 +69,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se query = cur.mogrify(f"""SELECT sessions_notes.* FROM sessions_notes WHERE {" AND ".join(conditions)} - ORDER BY created_at {data.order} + ORDER BY created_at {data.order.value} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) diff --git a/ee/api/chalicelib/core/assist_records.py b/ee/api/chalicelib/core/assist_records.py index 18449dc14..cda9419f6 100644 --- a/ee/api/chalicelib/core/assist_records.py +++ b/ee/api/chalicelib/core/assist_records.py @@ -61,7 +61,7 @@ def search_records(project_id, data: schemas_ee.AssistRecordSearchPayloadSchema, INNER JOIN projects USING (project_id) LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY assist_records.created_at {data.order} + ORDER BY assist_records.created_at {data.order.value} LIMIT %(p_limit)s OFFSET %(p_start)s;""", params) cur.execute(query) diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py index 5d4ea1c67..02a1607c2 100644 --- a/ee/api/chalicelib/core/custom_metrics.py +++ b/ee/api/chalicelib/core/custom_metrics.py @@ -427,7 +427,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser AND users.user_id = metrics.user_id ) AS owner ON (TRUE) WHERE {" AND ".join(constraints)} - ORDER BY created_at {data.order} + ORDER BY created_at {data.order.value} LIMIT %(limit)s OFFSET %(offset)s;""", params) cur.execute(query) rows = cur.fetchall() diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 1a4a1704d..6f3ae1960 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -73,7 +73,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se FROM sessions_notes INNER JOIN users USING (user_id) WHERE {" AND 
".join(conditions)} - ORDER BY created_at {data.order} + ORDER BY created_at {data.order.value} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py index 14f26e0b6..52866cc99 100644 --- a/ee/api/chalicelib/core/traces.py +++ b/ee/api/chalicelib/core/traces.py @@ -179,10 +179,10 @@ def get_all(tenant_id, data: schemas_ee.TrailSearchPayloadSchema): COALESCE(JSONB_AGG(full_traces ORDER BY rn) FILTER (WHERE rn > %(p_start)s AND rn <= %(p_end)s), '[]'::JSONB) AS sessions FROM (SELECT traces.*,users.email,users.name AS username, - ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order}) AS rn + ROW_NUMBER() OVER (ORDER BY traces.created_at {data.order.value}) AS rn FROM traces LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY traces.created_at {data.order}) AS full_traces;""", params) + ORDER BY traces.created_at {data.order.value}) AS full_traces;""", params) ) rows = cur.fetchone() return helper.dict_to_camel_case(rows) From b72dd3fcbf637df8262deebd0175e0c1074ea124 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 15:41:39 +0100 Subject: [PATCH 03/19] feat(chalice): fixed order-enum-value --- api/chalicelib/core/sessions.py | 4 ++-- ee/api/chalicelib/core/sessions.py | 4 ++-- ee/api/chalicelib/core/sessions_exp.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 9c96d45c6..bc8109d2b 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -151,7 +151,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc else: data.order = 
data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -186,7 +186,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 37106121a..3c01a72d6 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -153,7 +153,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -188,7 +188,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 2cb12188d..dd0c7386f 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -237,7 +237,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -266,7 
+266,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.desc sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) From 774d917d04dff93fa4b61955cd24d9212784322c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 15:54:21 +0100 Subject: [PATCH 04/19] feat(chalice): fixed order-enum-value --- api/chalicelib/core/sessions.py | 12 ++++++------ ee/api/chalicelib/core/assist_records.py | 2 +- ee/api/chalicelib/core/sessions.py | 12 ++++++------ ee/api/chalicelib/core/sessions_exp.py | 10 +++++----- ee/api/chalicelib/core/sessions_notes.py | 2 +- 5 files changed, 19 insertions(+), 19 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index bc8109d2b..6f5ad3fc1 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -151,7 +151,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -168,8 +168,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} 
{data.order}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -186,7 +186,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -195,12 +195,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order.value}, issue_score DESC) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order.value}, issue_score DESC) AS full_sessions;""", + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", full_args) # print("--------------------") # print(main_query) diff --git a/ee/api/chalicelib/core/assist_records.py b/ee/api/chalicelib/core/assist_records.py index cda9419f6..18449dc14 100644 --- a/ee/api/chalicelib/core/assist_records.py +++ b/ee/api/chalicelib/core/assist_records.py @@ -61,7 +61,7 @@ def search_records(project_id, data: schemas_ee.AssistRecordSearchPayloadSchema, INNER JOIN projects USING (project_id) LEFT JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY assist_records.created_at {data.order.value} + ORDER BY 
assist_records.created_at {data.order} LIMIT %(p_limit)s OFFSET %(p_start)s;""", params) cur.execute(query) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 3c01a72d6..2ff245eb3 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -153,7 +153,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -170,8 +170,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -188,7 +188,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -197,12 +197,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ main_query = cur.mogrify(f"""SELECT COUNT(full_sessions) AS count, 
COALESCE(JSONB_AGG(full_sessions) FILTER (WHERE rn>%(sessions_limit_s)s AND rn<=%(sessions_limit_e)s), '[]'::JSONB) AS sessions - FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order.value}, issue_score DESC) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY {sort} {data.order}, issue_score DESC) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) > 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} ORDER BY s.session_id desc) AS filtred_sessions - ORDER BY {sort} {data.order.value}, issue_score DESC) AS full_sessions;""", + ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", full_args) # print("--------------------") # print(main_query) diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index dd0c7386f..fad24c4ac 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -237,7 +237,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value else: data.order = data.order.upper() if data.sort is not None and data.sort != 'sessionsCount': @@ -254,8 +254,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ count(full_sessions) AS user_sessions_count, jsonb_agg(full_sessions) FILTER (WHERE rn <= 1) AS last_session, MIN(full_sessions.start_ts) AS first_session_ts, - ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order.value}) AS rn - FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order.value}) AS rn + ROW_NUMBER() OVER (ORDER BY {g_sort} {data.order}) AS rn + FROM (SELECT *, ROW_NUMBER() OVER (PARTITION BY user_id ORDER BY {sort} {data.order}) AS rn FROM (SELECT DISTINCT ON(s.session_id) {SESSION_PROJECTION_COLS} {"," if len(meta_keys) 
> 0 else ""}{",".join([f'metadata_{m["index"]}' for m in meta_keys])} {query_part} @@ -266,7 +266,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.desc.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) @@ -285,7 +285,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ AND _timestamp >= toDateTime(%(startDate)s / 1000)) AS viewed_sessions ON (viewed_sessions.session_id = s.session_id) ) AS raw - ORDER BY sort_key {data.order.value} + ORDER BY sort_key {data.order} LIMIT %(sessions_limit)s OFFSET %(sessions_limit_s)s) AS sorted_sessions;""", full_args) # print("--------------------") diff --git a/ee/api/chalicelib/core/sessions_notes.py b/ee/api/chalicelib/core/sessions_notes.py index 6f3ae1960..1a4a1704d 100644 --- a/ee/api/chalicelib/core/sessions_notes.py +++ b/ee/api/chalicelib/core/sessions_notes.py @@ -73,7 +73,7 @@ def get_all_notes_by_project_id(tenant_id, project_id, user_id, data: schemas.Se FROM sessions_notes INNER JOIN users USING (user_id) WHERE {" AND ".join(conditions)} - ORDER BY created_at {data.order.value} + ORDER BY created_at {data.order} LIMIT {data.limit} OFFSET {data.limit * (data.page - 1)};""", {"project_id": project_id, "user_id": user_id, "tenant_id": tenant_id, **extra_params}) From e6ab7d88b4d77a9430f0e973501e481dcdb1fee3 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 16:21:24 +0100 Subject: [PATCH 05/19] feat(chalice): fixed order-enum-value --- api/chalicelib/core/sessions.py | 4 +++- ee/api/chalicelib/core/sessions.py | 4 +++- ee/api/chalicelib/core/sessions_exp.py | 4 +++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 
6f5ad3fc1..ad91076a6 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -153,7 +153,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.order is None: data.order = schemas.SortOrderType.desc.value else: - data.order = data.order.upper() + data.order = data.order.value if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" @@ -187,6 +187,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ else: if data.order is None: data.order = schemas.SortOrderType.desc.value + else: + data.order = data.order.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 2ff245eb3..67877b455 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -155,7 +155,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.order is None: data.order = schemas.SortOrderType.desc.value else: - data.order = data.order.upper() + data.order = data.order.value if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" @@ -189,6 +189,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ else: if data.order is None: data.order = schemas.SortOrderType.desc.value + else: + data.order = data.order.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index 
fad24c4ac..e0ca6e971 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -239,7 +239,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ if data.order is None: data.order = schemas.SortOrderType.desc.value else: - data.order = data.order.upper() + data.order = data.order.value if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" @@ -267,6 +267,8 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ else: if data.order is None: data.order = schemas.SortOrderType.desc.value + else: + data.order = data.order.value sort = 'session_id' if data.sort is not None and data.sort != "session_id": # sort += " " + data.order + "," + helper.key_to_snake_case(data.sort) From 0f62d7a613384a2b88bf16f0cfd9797e04314370 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 16:25:20 +0100 Subject: [PATCH 06/19] feat(chalice): built chalice only --- api/build.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/api/build.sh b/api/build.sh index 0e82088fe..ab3519683 100644 --- a/api/build.sh +++ b/api/build.sh @@ -59,11 +59,11 @@ function build_api(){ check_prereq build_api $environment echo buil_complete -IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 - -[[ $environment == "ee" ]] && { - cp ../ee/api/build_crons.sh . - IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1 - exit_err $? - rm build_crons.sh -} || true +#IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 +# +#[[ $environment == "ee" ]] && { +# cp ../ee/api/build_crons.sh . +# IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1 +# exit_err $? 
+# rm build_crons.sh +#} || true From cad42f077447fee2570c064c7aa12fbfebaf36d4 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 16:40:27 +0100 Subject: [PATCH 07/19] feat(chalice): changed build script --- api/build.sh | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/api/build.sh b/api/build.sh index ab3519683..0e82088fe 100644 --- a/api/build.sh +++ b/api/build.sh @@ -59,11 +59,11 @@ function build_api(){ check_prereq build_api $environment echo buil_complete -#IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 -# -#[[ $environment == "ee" ]] && { -# cp ../ee/api/build_crons.sh . -# IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1 -# exit_err $? -# rm build_crons.sh -#} || true +IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_alerts.sh $1 + +[[ $environment == "ee" ]] && { + cp ../ee/api/build_crons.sh . + IMAGE_TAG=$IMAGE_TAG PUSH_IMAGE=$PUSH_IMAGE DOCKER_REPO=$DOCKER_REPO bash build_crons.sh $1 + exit_err $? 
+ rm build_crons.sh +} || true From a4fb0e259bdaf2f37ac91010f562b26faa494921 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 16:56:08 +0100 Subject: [PATCH 08/19] feat(chalice): fixed order-enum-value for series --- api/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index 54907c46e..bb19db806 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -847,7 +847,7 @@ class CardSeriesFilterSchema(SearchErrorsSchema): startDate: Optional[int] = Field(default=None) endDate: Optional[int] = Field(default=None) sort: Optional[str] = Field(default=None) - order: Optional[str] = Field(default=None) + order: SortOrderType = Field(default=SortOrderType.desc) group_by_user: Optional[bool] = Field(default=False, const=True) From 94f531dbadb272e5db10c3c7f7c48797b830f08a Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Fri, 20 Jan 2023 18:02:03 +0100 Subject: [PATCH 09/19] feat(chalice): changed card-funnel feat(DB): changed migration script --- api/schemas.py | 4 +++- ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/api/schemas.py b/api/schemas.py index bb19db806..60d6c6e43 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -1049,7 +1049,9 @@ class CreateCardSchema(CardChartSchema): assert values.get("metric_value") is None or len(values.get("metric_value")) == 0, \ f"metricValue is only available for metricOf:{MetricOfTable.issues}" elif values.get("metric_type") == MetricType.funnel: - assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}" + pass + # allow UI to send empty series for funnel + # assert len(values["series"]) == 1, f"must have only 1 series for metricType:{MetricType.funnel}" # ignore this for now, let the UI send whatever he wants for metric_of # assert isinstance(values.get("metric_of"), MetricOfTimeseries), \ # f"metricOf must be of type
{MetricOfTimeseries} for metricType:{MetricType.funnel}" diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index d46b4ae2a..090cc5b4a 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -325,6 +325,11 @@ $$ LANGUAGE plpgsql; +DROP FUNCTION get_new_filter_key; +DROP FUNCTION get_new_event_filter_key; +DROP FUNCTION get_new_event_key; + + COMMIT; CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector); From 6b9e68829dec609259013983ef2fbfc3b144201c Mon Sep 17 00:00:00 2001 From: MauricioGarciaS <47052044+MauricioGarciaS@users.noreply.github.com> Date: Fri, 20 Jan 2023 18:42:59 +0100 Subject: [PATCH 10/19] Fixed issue in 30 day insight request --- ee/api/chalicelib/core/sessions_insights.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py index 62e31bc1b..643a3bf1f 100644 --- a/ee/api/chalicelib/core/sessions_insights.py +++ b/ee/api/chalicelib/core/sessions_insights.py @@ -20,12 +20,14 @@ def _table_where(table, index, value): def _sum_table_index(table, index): + print(f'index {index}') s = 0 count = 0 for row in table: v = row[index] if v is None: continue + print(v) s += v count += 1 return s @@ -213,21 +215,31 @@ def query_most_errors_by_period(project_id, start_time, end_time, table_hh1, table_hh2, columns, this_period_errors, last_period_errors = __get_two_values(res, time_index='hh', name_index='names') del res - + print(table_hh1) + print('\n') + print(table_hh2) + print('\n') new_errors = [x for x in this_period_errors if x not in last_period_errors] common_errors = [x for x in this_period_errors if x not in new_errors] sessions_idx = columns.index('sessions') names_idx = columns.index('names') + + print(_table_where(table_hh1, 
names_idx, this_period_errors[0])) + percentage_errors = dict() total = _sum_table_index(table_hh1, sessions_idx) # error_increase = dict() new_error_values = dict() error_values = dict() for n in this_period_errors: + if n is None: + continue percentage_errors[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) - new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), names_idx) + new_error_values[n] = _sum_table_index(_table_where(table_hh1, names_idx, n), sessions_idx) for n in common_errors: + if n is None: + continue old_errors = _sum_table_index(_table_where(table_hh2, names_idx, n), names_idx) if old_errors == 0: continue From d10453f58e6161157922a981accf3515214606d0 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 14:25:57 +0100 Subject: [PATCH 11/19] feat(chalice): funnels cleaned --- api/chalicelib/core/funnels.py | 144 ------------------------------ api/routers/core.py | 70 --------------- api/schemas.py | 6 -- ee/api/chalicelib/core/funnels.py | 129 -------------------------- 4 files changed, 349 deletions(-) diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 21799899d..0ab340ff1 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -52,108 +52,6 @@ def __transform_old_funnels(events): return events -def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public): - helper.delete_keys_from_dict(filter, REMOVE_KEYS) - filter.events = filter_stages(stages=filter.events) - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""\ - INSERT INTO public.funnels (project_id, user_id, name, filter,is_public) - VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s) - RETURNING *;""", - {"user_id": user_id, "project_id": project_id, "name": name, - "filter": json.dumps(filter.dict()), - "is_public": is_public}) - - cur.execute( - query - ) - r = cur.fetchone() - 
r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - r = helper.dict_to_camel_case(r) - r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) - return {"data": r} - - -def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None): - s_query = [] - if filter is not None: - helper.delete_keys_from_dict(filter, REMOVE_KEYS) - s_query.append("filter = %(filter)s::jsonb") - if name is not None and len(name) > 0: - s_query.append("name = %(name)s") - if is_public is not None: - s_query.append("is_public = %(is_public)s") - if len(s_query) == 0: - return {"errors": ["Nothing to update"]} - with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""\ - UPDATE public.funnels - SET {" , ".join(s_query)} - WHERE funnel_id=%(funnel_id)s - AND project_id = %(project_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name, - "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public, - "project_id": project_id}) - # print("--------------------") - # print(query) - # print("--------------------") - cur.execute( - query - ) - r = cur.fetchone() - if r is None: - return {"errors": ["funnel not found"]} - r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - r = helper.dict_to_camel_case(r) - r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) - r["filter"] = helper.old_search_payload_to_flat(r["filter"]) - return {"data": r} - - -def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""\ - SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public - {",filter" if details else ""} - FROM public.funnels - WHERE project_id = %(project_id)s - AND funnels.deleted_at IS NULL - AND 
(funnels.user_id = %(user_id)s OR funnels.is_public);""", - {"project_id": project_id, "user_id": user_id} - ) - ) - - rows = cur.fetchall() - rows = helper.list_to_camel_case(rows) - for row in rows: - row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) - if details: - row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"])) - if row.get("filter") is not None and row["filter"].get("events") is not None: - row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"])) - - get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date, - end_date=end_date) - counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]), - project_id=project_id, user_id=None, count_only=True) - row["sessionsCount"] = counts["countSessions"] - row["usersCount"] = counts["countUsers"] - filter_clone = dict(row["filter"]) - overview = significance.get_overview(filter_d=row["filter"], project_id=project_id) - row["stages"] = overview["stages"] - row.pop("filter") - row["stagesCount"] = len(row["stages"]) - # TODO: ask david to count it alone - row["criticalIssuesCount"] = overview["criticalIssuesCount"] - row["missedConversions"] = 0 if len(row["stages"]) < 2 \ - else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"] - row["filter"] = helper.old_search_payload_to_flat(filter_clone) - return rows - - def get_possible_issue_types(project_id): return [{"type": t, "title": helper.get_issue_title(t)} for t in ['click_rage', 'dead_click', 'excessive_scrolling', @@ -172,21 +70,6 @@ def get_start_end_time(filter_d, range_value, start_date, end_date): filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"]) -def delete(project_id, funnel_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.funnels - SET deleted_at = timezone('utc'::text, now()) - 
WHERE project_id = %(project_id)s - AND funnel_id = %(funnel_id)s - AND (user_id = %(user_id)s OR is_public);""", - {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id}) - ) - - return {"data": {"state": "success"}} - - def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None): f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) if f is None: @@ -343,30 +226,3 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): if flatten: f["filter"] = helper.old_search_payload_to_flat(f["filter"]) return f - - -def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None, - start_date=None, end_date=None): - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - data.startDate = data.startDate if data.startDate is not None else start_date - data.endDate = data.endDate if data.endDate is not None else end_date - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - - issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \ - .get("issues", {}) - issues = issues.get("significant", []) + issues.get("insignificant", []) - issue = None - for i in issues: - if i.get("issueId", "") == issue_id: - issue = i - break - return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue, - data=data) if issue is not None else {"total": 0, "sessions": []}, - # "stages": helper.list_to_camel_case(insights), - # "totalDropDueToIssues": total_drop_due_to_issues, - "issue": issue} diff --git a/api/routers/core.py b/api/routers/core.py index 75e47e39b..8e06fa78e 100644 --- a/api/routers/core.py +++ 
b/api/routers/core.py @@ -611,37 +611,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = return alerts.delete(projectId, alertId) -@app.post('/{projectId}/funnels', tags=["funnels"]) -def add_funnel(projectId: int, data: schemas.FunnelSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return funnels.create(project_id=projectId, - user_id=context.user_id, - name=data.name, - filter=data.filter, - is_public=data.is_public) - - -@app.get('/{projectId}/funnels', tags=["funnels"]) -def get_funnels(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_by_user(project_id=projectId, - user_id=context.user_id, - range_value=None, - start_date=None, - end_date=None, - details=False)} - - -@app.get('/{projectId}/funnels/details', tags=["funnels"]) -def get_funnels_with_details(projectId: int, rangeValue: str = None, startDate: int = None, endDate: int = None, - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_by_user(project_id=projectId, - user_id=context.user_id, - range_value=rangeValue, - start_date=startDate, - end_date=endDate, - details=True)} - - @app.get('/{projectId}/funnels/issue_types', tags=["funnels"]) def get_possible_issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): return {"data": funnels.get_possible_issue_types(project_id=projectId)} @@ -703,45 +672,6 @@ def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = Non "issue": issue}} -@app.post('/{projectId}/funnels/{funnelId}/issues/{issueId}/sessions', tags=["funnels"]) -def get_funnel_issue_sessions(projectId: int, funnelId: int, issueId: str, - data: schemas.FunnelSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - data = funnels.search_by_issue(project_id=projectId, user_id=context.user_id, issue_id=issueId, - funnel_id=funnelId, data=data) - if "errors" in data: - 
return data - if data.get("issue") is None: - data["issue"] = issues.get(project_id=projectId, issue_id=issueId) - return { - "data": data - } - - -@app.get('/{projectId}/funnels/{funnelId}', tags=["funnels"]) -def get_funnel(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)): - data = funnels.get(funnel_id=funnelId, project_id=projectId, user_id=context.user_id) - if data is None: - return {"errors": ["funnel not found"]} - return {"data": data} - - -@app.post('/{projectId}/funnels/{funnelId}', tags=["funnels"]) -def edit_funnel(projectId: int, funnelId: int, data: schemas.UpdateFunnelSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return funnels.update(funnel_id=funnelId, - user_id=context.user_id, - name=data.name, - filter=data.filter.dict(), - is_public=data.is_public, - project_id=projectId) - - -@app.delete('/{projectId}/funnels/{funnelId}', tags=["funnels"]) -def delete_filter(projectId: int, funnelId: int, context: schemas.CurrentContext = Depends(OR_context)): - return funnels.delete(user_id=context.user_id, funnel_id=funnelId, project_id=projectId) - - @app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"]) @app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"]) def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), diff --git a/api/schemas.py b/api/schemas.py index 60d6c6e43..c19214a6a 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -783,12 +783,6 @@ class FunnelSchema(BaseModel): alias_generator = attribute_to_camel_case -class UpdateFunnelSchema(FunnelSchema): - name: Optional[str] = Field(default=None) - filter: Optional[FunnelSearchPayloadSchema] = Field(default=None) - is_public: Optional[bool] = Field(default=None) - - class FunnelInsightsPayloadSchema(FlatSessionsSearchPayloadSchema): # class FunnelInsightsPayloadSchema(SessionsSearchPayloadSchema): sort: Optional[str] = Field(None) diff --git 
a/ee/api/chalicelib/core/funnels.py b/ee/api/chalicelib/core/funnels.py index 27b84616e..62a776653 100644 --- a/ee/api/chalicelib/core/funnels.py +++ b/ee/api/chalicelib/core/funnels.py @@ -60,108 +60,6 @@ def __transform_old_funnels(events): return events -def create(project_id, user_id, name, filter: schemas.FunnelSearchPayloadSchema, is_public): - helper.delete_keys_from_dict(filter, REMOVE_KEYS) - filter.events = filter_stages(stages=filter.events) - with pg_client.PostgresClient() as cur: - query = cur.mogrify("""\ - INSERT INTO public.funnels (project_id, user_id, name, filter,is_public) - VALUES (%(project_id)s, %(user_id)s, %(name)s, %(filter)s::jsonb,%(is_public)s) - RETURNING *;""", - {"user_id": user_id, "project_id": project_id, "name": name, - "filter": json.dumps(filter.dict()), - "is_public": is_public}) - - cur.execute( - query - ) - r = cur.fetchone() - r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - r = helper.dict_to_camel_case(r) - r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) - return {"data": r} - - -def update(funnel_id, user_id, project_id, name=None, filter=None, is_public=None): - s_query = [] - if filter is not None: - helper.delete_keys_from_dict(filter, REMOVE_KEYS) - s_query.append("filter = %(filter)s::jsonb") - if name is not None and len(name) > 0: - s_query.append("name = %(name)s") - if is_public is not None: - s_query.append("is_public = %(is_public)s") - if len(s_query) == 0: - return {"errors": ["Nothing to update"]} - with pg_client.PostgresClient() as cur: - query = cur.mogrify(f"""\ - UPDATE public.funnels - SET {" , ".join(s_query)} - WHERE funnel_id=%(funnel_id)s - AND project_id = %(project_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING *;""", {"user_id": user_id, "funnel_id": funnel_id, "name": name, - "filter": json.dumps(filter) if filter is not None else None, "is_public": is_public, - "project_id": project_id}) - # 
print("--------------------") - # print(query) - # print("--------------------") - cur.execute( - query - ) - r = cur.fetchone() - if r is None: - return {"errors": ["funnel not found"]} - r["created_at"] = TimeUTC.datetime_to_timestamp(r["created_at"]) - r = helper.dict_to_camel_case(r) - r["filter"]["startDate"], r["filter"]["endDate"] = TimeUTC.get_start_end_from_range(r["filter"]["rangeValue"]) - r["filter"] = helper.old_search_payload_to_flat(r["filter"]) - return {"data": r} - - -def get_by_user(project_id, user_id, range_value=None, start_date=None, end_date=None, details=False): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""\ - SELECT funnel_id, project_id, user_id, name, created_at, deleted_at, is_public - {",filter" if details else ""} - FROM public.funnels - WHERE project_id = %(project_id)s - AND funnels.deleted_at IS NULL - AND (funnels.user_id = %(user_id)s OR funnels.is_public);""", - {"project_id": project_id, "user_id": user_id} - ) - ) - - rows = cur.fetchall() - rows = helper.list_to_camel_case(rows) - for row in rows: - row["createdAt"] = TimeUTC.datetime_to_timestamp(row["createdAt"]) - if details: - row["filter"]["events"] = filter_stages(__parse_events(row["filter"]["events"])) - if row.get("filter") is not None and row["filter"].get("events") is not None: - row["filter"]["events"] = __transform_old_funnels(__unparse_events(row["filter"]["events"])) - - get_start_end_time(filter_d=row["filter"], range_value=range_value, start_date=start_date, - end_date=end_date) - counts = sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(row["filter"]), - project_id=project_id, user_id=None, count_only=True) - row["sessionsCount"] = counts["countSessions"] - row["usersCount"] = counts["countUsers"] - filter_clone = dict(row["filter"]) - overview = significance.get_overview(filter_d=row["filter"], project_id=project_id) - row["stages"] = overview["stages"] - row.pop("filter") - row["stagesCount"] = 
len(row["stages"]) - # TODO: ask david to count it alone - row["criticalIssuesCount"] = overview["criticalIssuesCount"] - row["missedConversions"] = 0 if len(row["stages"]) < 2 \ - else row["stages"][0]["sessionsCount"] - row["stages"][-1]["sessionsCount"] - row["filter"] = helper.old_search_payload_to_flat(filter_clone) - return rows - - def get_possible_issue_types(project_id): return [{"type": t, "title": helper.get_issue_title(t)} for t in ['click_rage', 'dead_click', 'excessive_scrolling', @@ -351,30 +249,3 @@ def get(funnel_id, project_id, user_id, flatten=True, fix_stages=True): if flatten: f["filter"] = helper.old_search_payload_to_flat(f["filter"]) return f - - -def search_by_issue(user_id, project_id, funnel_id, issue_id, data: schemas.FunnelSearchPayloadSchema, range_value=None, - start_date=None, end_date=None): - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - data.startDate = data.startDate if data.startDate is not None else start_date - data.endDate = data.endDate if data.endDate is not None else end_date - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - - issues = get_issues_on_the_fly(funnel_id=funnel_id, user_id=user_id, project_id=project_id, data=data) \ - .get("issues", {}) - issues = issues.get("significant", []) + issues.get("insignificant", []) - issue = None - for i in issues: - if i.get("issueId", "") == issue_id: - issue = i - break - return {"sessions": sessions.search_sessions(user_id=user_id, project_id=project_id, issue=issue, - data=data) if issue is not None else {"total": 0, "sessions": []}, - # "stages": helper.list_to_camel_case(insights), - # "totalDropDueToIssues": total_drop_due_to_issues, - "issue": issue} From a332606580f7b24fbb05fabdc91c85bdefdb2c63 Mon Sep 17 
00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 14:41:35 +0100 Subject: [PATCH 12/19] feat(chalice): funnels cleaned feat(chalice): funnels refactored feat(chalice): funnels merged --- api/chalicelib/core/funnels.py | 164 +------------------ api/chalicelib/core/sessions.py | 33 ---- api/routers/core.py | 61 -------- ee/api/.gitignore | 2 +- ee/api/chalicelib/core/funnels.py | 251 ------------------------------ ee/api/clean.sh | 2 +- 6 files changed, 4 insertions(+), 509 deletions(-) delete mode 100644 ee/api/chalicelib/core/funnels.py diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py index 0ab340ff1..c9f51fcc9 100644 --- a/api/chalicelib/core/funnels.py +++ b/api/chalicelib/core/funnels.py @@ -1,15 +1,9 @@ -import json from typing import List import schemas -from chalicelib.core import significance, sessions -from chalicelib.utils import helper, pg_client +from chalicelib.core import significance +from chalicelib.utils import helper from chalicelib.utils import sql_helper as sh -from chalicelib.utils.TimeUTC import TimeUTC - -REMOVE_KEYS = ["key", "_key", "startDate", "endDate"] - -ALLOW_UPDATE_FOR = ["name", "filter"] def filter_stages(stages: List[schemas._SessionSearchEventSchema]): @@ -24,10 +18,6 @@ def __parse_events(f_events: List[dict]): return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events] -def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]): - return [e.dict() for e in f_events] - - def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): if f_events is None: return @@ -45,95 +35,6 @@ def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): return events -def __transform_old_funnels(events): - for e in events: - if not isinstance(e.get("value"), list): - e["value"] = [e["value"]] - return events - - -def get_possible_issue_types(project_id): - return [{"type": t, "title": helper.get_issue_title(t)} for t in - ['click_rage', 'dead_click', 
'excessive_scrolling', - 'bad_request', 'missing_resource', 'memory', 'cpu', - 'slow_resource', 'slow_page_load', 'crash', 'custom_event_error', - 'js_error']] - - -def get_start_end_time(filter_d, range_value, start_date, end_date): - if start_date is not None and end_date is not None: - filter_d["startDate"], filter_d["endDate"] = start_date, end_date - elif range_value is not None and len(range_value) > 0: - filter_d["rangeValue"] = range_value - filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value) - else: - filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"]) - - -def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), - project_id=project_id, - user_id=user_id) - - -def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema): - data.events = filter_stages(data.events) - data.events = __fix_stages(data.events) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.range_value, - start_date=data.startDate, end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - return sessions.search_sessions(data=data, project_id=project_id, - user_id=user_id) - - -def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - 
if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id) - insights = helper.list_to_camel_case(insights) - if len(insights) > 0: - # fix: this fix for huge drop count - if total_drop_due_to_issues > insights[0]["sessionsCount"]: - total_drop_due_to_issues = insights[0]["sessionsCount"] - # end fix - insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"data": {"stages": insights, - "totalDropDueToIssues": total_drop_due_to_issues}} - - -def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema): - data.events = filter_stages(__parse_events(data.events)) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, - start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"]) - data.events = __fix_stages(data.events) - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) - insights = helper.list_to_camel_case(insights) - if len(insights) > 0: - # fix: this fix for huge drop count - if total_drop_due_to_issues > insights[0]["sessionsCount"]: - total_drop_due_to_issues = insights[0]["sessionsCount"] - # end fix - insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"data": {"stages": insights, - "totalDropDueToIssues": total_drop_due_to_issues}} - - # def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): data.events = 
filter_stages(__parse_events(data.events)) @@ -154,35 +55,6 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte "totalDropDueToIssues": total_drop_due_to_issues} -def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - return {"data": { - "issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id)) - }} - - -def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): - data.events = filter_stages(data.events) - data.events = __fix_stages(data.events) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, - start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - if len(data.events) < 2: - return {"issues": []} - return { - "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, - last_stage=len(data.events)))} - - # def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): data.events = filter_stages(data.events) @@ -194,35 +66,3 @@ def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchem "issues": helper.dict_to_camel_case( significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, last_stage=len(data.events)))} - - -def get(funnel_id, project_id, user_id, flatten=True, 
fix_stages=True): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - SELECT - * - FROM public.funnels - WHERE project_id = %(project_id)s - AND deleted_at IS NULL - AND funnel_id = %(funnel_id)s - AND (user_id = %(user_id)s OR is_public);""", - {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id} - ) - ) - - f = helper.dict_to_camel_case(cur.fetchone()) - if f is None: - return None - if f.get("filter") is not None and f["filter"].get("events") is not None: - f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"]) - f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"]) - f["filter"]["events"] = __parse_events(f["filter"]["events"]) - f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) - if fix_stages: - f["filter"]["events"] = __fix_stages(f["filter"]["events"]) - f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]] - if flatten: - f["filter"] = helper.old_search_payload_to_flat(f["filter"]) - return f diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index ad91076a6..2044b0353 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -1086,39 +1086,6 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): return results -def search_by_issue(user_id, issue, project_id, start_date, end_date): - constraints = ["s.project_id = %(projectId)s", - "p_issues.context_string = %(issueContextString)s", - "p_issues.type = %(issueType)s"] - if start_date is not None: - constraints.append("start_ts >= %(startDate)s") - if end_date is not None: - constraints.append("start_ts <= %(endDate)s") - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - f"""SELECT DISTINCT ON(favorite_sessions.session_id, s.session_id) {SESSION_PROJECTION_COLS} - FROM public.sessions AS s - INNER JOIN events_common.issues USING (session_id) - INNER JOIN public.issues AS p_issues USING (issue_id) - LEFT 
JOIN (SELECT user_id, session_id - FROM public.user_favorite_sessions - WHERE user_id = %(userId)s) AS favorite_sessions - USING (session_id) - WHERE {" AND ".join(constraints)} - ORDER BY s.session_id DESC;""", - { - "issueContextString": issue["contextString"], - "issueType": issue["type"], "userId": user_id, - "projectId": project_id, - "startDate": start_date, - "endDate": end_date - })) - - rows = cur.fetchall() - return helper.list_to_camel_case(rows) - - def get_user_sessions(project_id, user_id, start_date, end_date): with pg_client.PostgresClient() as cur: constraints = ["s.project_id = %(projectId)s", "s.user_id = %(userId)s"] diff --git a/api/routers/core.py b/api/routers/core.py index 8e06fa78e..074caf7c8 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -611,67 +611,6 @@ def delete_alert(projectId: int, alertId: int, context: schemas.CurrentContext = return alerts.delete(projectId, alertId) -@app.get('/{projectId}/funnels/issue_types', tags=["funnels"]) -def get_possible_issue_types(projectId: int, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_possible_issue_types(project_id=projectId)} - - -@app.get('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) -def get_funnel_insights(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None, - endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): - return funnels.get_top_insights(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - range_value=rangeValue, start_date=startDate, end_date=endDate) - - -@app.post('/{projectId}/funnels/{funnelId}/insights', tags=["funnels"]) -def get_funnel_insights_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelInsightsPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return funnels.get_top_insights_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data) - - 
-@app.get('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) -def get_funnel_issues(projectId: int, funnelId, rangeValue: str = None, startDate: int = None, endDate: int = None, - context: schemas.CurrentContext = Depends(OR_context)): - return funnels.get_issues(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - range_value=rangeValue, start_date=startDate, end_date=endDate) - - -@app.post('/{projectId}/funnels/{funnelId}/issues', tags=["funnels"]) -def get_funnel_issues_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_issues_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data)} - - -@app.get('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) -def get_funnel_sessions(projectId: int, funnelId: int, rangeValue: str = None, startDate: int = None, - endDate: int = None, context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_sessions(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - range_value=rangeValue, - start_date=startDate, - end_date=endDate)} - - -@app.post('/{projectId}/funnels/{funnelId}/sessions', tags=["funnels"]) -def get_funnel_sessions_on_the_fly(projectId: int, funnelId: int, data: schemas.FunnelSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): - return {"data": funnels.get_sessions_on_the_fly(funnel_id=funnelId, user_id=context.user_id, project_id=projectId, - data=data)} - - -@app.get('/{projectId}/funnels/issues/{issueId}/sessions', tags=["funnels"]) -def get_funnel_issue_sessions(projectId: int, issueId: str, startDate: int = None, endDate: int = None, - context: schemas.CurrentContext = Depends(OR_context)): - issue = issues.get(project_id=projectId, issue_id=issueId) - if issue is None: - return {"errors": ["issue not found"]} - return { - "data": 
{"sessions": sessions.search_by_issue(user_id=context.user_id, project_id=projectId, issue=issue, - start_date=startDate, end_date=endDate), - "issue": issue}} - - @app_apikey.put('/{projectKey}/sourcemaps/', tags=["sourcemaps"]) @app_apikey.put('/{projectKey}/sourcemaps', tags=["sourcemaps"]) def sign_sourcemap_for_upload(projectKey: str, data: schemas.SourcemapUploadPayloadSchema = Body(...), diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 57a0f1c86..360da751a 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -194,7 +194,7 @@ Pipfile.lock /chalicelib/core/errors_favorite.py #exp /chalicelib/core/events.py /chalicelib/core/events_ios.py -#exp /chalicelib/core/funnels.py +/chalicelib/core/funnels.py /chalicelib/core/integration_base.py /chalicelib/core/integration_base_issue.py /chalicelib/core/integration_github.py diff --git a/ee/api/chalicelib/core/funnels.py b/ee/api/chalicelib/core/funnels.py deleted file mode 100644 index 62a776653..000000000 --- a/ee/api/chalicelib/core/funnels.py +++ /dev/null @@ -1,251 +0,0 @@ -import json -from typing import List - -import chalicelib.utils.helper -import schemas -from chalicelib.core import significance -from chalicelib.utils import helper, pg_client -from chalicelib.utils import sql_helper as sh -from chalicelib.utils.TimeUTC import TimeUTC - -from decouple import config - -if config("EXP_SESSIONS_SEARCH", cast=bool, default=False): - from chalicelib.core import sessions_legacy as sessions -else: - from chalicelib.core import sessions - -REMOVE_KEYS = ["key", "_key", "startDate", "endDate"] - -ALLOW_UPDATE_FOR = ["name", "filter"] - - -def filter_stages(stages: List[schemas._SessionSearchEventSchema]): - ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input, - schemas.EventType.location, schemas.EventType.custom, - schemas.EventType.click_ios, schemas.EventType.input_ios, - schemas.EventType.view_ios, schemas.EventType.custom_ios, ] - return [s for s in stages if s.type in ALLOW_TYPES and 
s.value is not None] - - -def __parse_events(f_events: List[dict]): - return [schemas._SessionSearchEventSchema.parse_obj(e) for e in f_events] - - -def __unparse_events(f_events: List[schemas._SessionSearchEventSchema]): - return [e.dict() for e in f_events] - - -def __fix_stages(f_events: List[schemas._SessionSearchEventSchema]): - if f_events is None: - return - events = [] - for e in f_events: - if e.operator is None: - e.operator = schemas.SearchEventOperator._is - - if not isinstance(e.value, list): - e.value = [e.value] - is_any = sh.isAny_opreator(e.operator) - if not is_any and isinstance(e.value, list) and len(e.value) == 0: - continue - events.append(e) - return events - - -def __transform_old_funnels(events): - for e in events: - if not isinstance(e.get("value"), list): - e["value"] = [e["value"]] - return events - - -def get_possible_issue_types(project_id): - return [{"type": t, "title": helper.get_issue_title(t)} for t in - ['click_rage', 'dead_click', 'excessive_scrolling', - 'bad_request', 'missing_resource', 'memory', 'cpu', - 'slow_resource', 'slow_page_load', 'crash', 'custom_event_error', - 'js_error']] - - -def get_start_end_time(filter_d, range_value, start_date, end_date): - if start_date is not None and end_date is not None: - filter_d["startDate"], filter_d["endDate"] = start_date, end_date - elif range_value is not None and len(range_value) > 0: - filter_d["rangeValue"] = range_value - filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(range_value) - else: - filter_d["startDate"], filter_d["endDate"] = TimeUTC.get_start_end_from_range(filter_d["rangeValue"]) - - -def delete(project_id, funnel_id, user_id): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify("""\ - UPDATE public.funnels - SET deleted_at = timezone('utc'::text, now()) - WHERE project_id = %(project_id)s - AND funnel_id = %(funnel_id)s - AND (user_id = %(user_id)s OR is_public);""", - {"funnel_id": funnel_id, "project_id": 
project_id, "user_id": user_id}) - ) - - return {"data": {"state": "success"}} - - -def get_sessions(project_id, funnel_id, user_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - return sessions.search_sessions(data=schemas.SessionsSearchPayloadSchema.parse_obj(f["filter"]), - project_id=project_id, - user_id=user_id) - - -def get_sessions_on_the_fly(funnel_id, project_id, user_id, data: schemas.FunnelSearchPayloadSchema): - data.events = filter_stages(data.events) - data.events = __fix_stages(data.events) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.range_value, - start_date=data.startDate, end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - return sessions.search_sessions(data=data, project_id=project_id, - user_id=user_id) - - -def get_top_insights(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=f["filter"], project_id=project_id) - insights = helper.list_to_camel_case(insights) - if len(insights) > 0: - # fix: this fix for huge drop count - if total_drop_due_to_issues > insights[0]["sessionsCount"]: - total_drop_due_to_issues = insights[0]["sessionsCount"] - # end fix - insights[-1]["dropDueToIssues"] = 
total_drop_due_to_issues - return {"data": {"stages": insights, - "totalDropDueToIssues": total_drop_due_to_issues}} - - -def get_top_insights_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelInsightsPayloadSchema): - data.events = filter_stages(__parse_events(data.events)) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, - start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelInsightsPayloadSchema.parse_obj(f["filter"]) - data.events = __fix_stages(data.events) - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) - insights = helper.list_to_camel_case(insights) - if len(insights) > 0: - # fix: this fix for huge drop count - if total_drop_due_to_issues > insights[0]["sessionsCount"]: - total_drop_due_to_issues = insights[0]["sessionsCount"] - # end fix - insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"data": {"stages": insights, - "totalDropDueToIssues": total_drop_due_to_issues}} - - -# def get_top_insights_on_the_fly_widget(project_id, data: schemas.FunnelInsightsPayloadSchema): -def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): - data.events = filter_stages(__parse_events(data.events)) - data.events = __fix_stages(data.events) - if len(data.events) == 0: - return {"stages": [], "totalDropDueToIssues": 0} - insights, total_drop_due_to_issues = significance.get_top_insights(filter_d=data.dict(), project_id=project_id) - insights = helper.list_to_camel_case(insights) - if len(insights) > 0: - # TODO: check if this correct - if total_drop_due_to_issues > insights[0]["sessionsCount"]: - if len(insights) == 0: - total_drop_due_to_issues = 0 - else: - total_drop_due_to_issues = insights[0]["sessionsCount"] - 
insights[-1]["sessionsCount"] - insights[-1]["dropDueToIssues"] = total_drop_due_to_issues - return {"stages": insights, - "totalDropDueToIssues": total_drop_due_to_issues} - - -def get_issues(project_id, user_id, funnel_id, range_value=None, start_date=None, end_date=None): - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=range_value, start_date=start_date, end_date=end_date) - return {"data": { - "issues": helper.dict_to_camel_case(significance.get_issues_list(filter_d=f["filter"], project_id=project_id)) - }} - - -def get_issues_on_the_fly(funnel_id, user_id, project_id, data: schemas.FunnelSearchPayloadSchema): - data.events = filter_stages(data.events) - data.events = __fix_stages(data.events) - if len(data.events) == 0: - f = get(funnel_id=funnel_id, project_id=project_id, user_id=user_id, flatten=False) - if f is None: - return {"errors": ["funnel not found"]} - get_start_end_time(filter_d=f["filter"], range_value=data.rangeValue, - start_date=data.startDate, - end_date=data.endDate) - data = schemas.FunnelSearchPayloadSchema.parse_obj(f["filter"]) - if len(data.events) < 2: - return {"issues": []} - return { - "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, - last_stage=len(data.events)))} - - -# def get_issues_on_the_fly_widget(project_id, data: schemas.FunnelSearchPayloadSchema): -def get_issues_on_the_fly_widget(project_id, data: schemas.CardSeriesFilterSchema): - data.events = filter_stages(data.events) - data.events = __fix_stages(data.events) - if len(data.events) < 0: - return {"issues": []} - - return { - "issues": helper.dict_to_camel_case( - significance.get_issues_list(filter_d=data.dict(), project_id=project_id, first_stage=1, - last_stage=len(data.events)))} - - -def get(funnel_id, project_id, user_id, flatten=True, 
fix_stages=True): - with pg_client.PostgresClient() as cur: - cur.execute( - cur.mogrify( - """\ - SELECT - * - FROM public.funnels - WHERE project_id = %(project_id)s - AND deleted_at IS NULL - AND funnel_id = %(funnel_id)s - AND (user_id = %(user_id)s OR is_public);""", - {"funnel_id": funnel_id, "project_id": project_id, "user_id": user_id} - ) - ) - - f = helper.dict_to_camel_case(cur.fetchone()) - if f is None: - return None - if f.get("filter") is not None and f["filter"].get("events") is not None: - f["filter"]["events"] = __transform_old_funnels(f["filter"]["events"]) - f["createdAt"] = TimeUTC.datetime_to_timestamp(f["createdAt"]) - f["filter"]["events"] = __parse_events(f["filter"]["events"]) - f["filter"]["events"] = filter_stages(stages=f["filter"]["events"]) - if fix_stages: - f["filter"]["events"] = __fix_stages(f["filter"]["events"]) - f["filter"]["events"] = [e.dict() for e in f["filter"]["events"]] - if flatten: - f["filter"] = helper.old_search_payload_to_flat(f["filter"]) - return f diff --git a/ee/api/clean.sh b/ee/api/clean.sh index 82d61b52f..acc91e7b7 100755 --- a/ee/api/clean.sh +++ b/ee/api/clean.sh @@ -14,7 +14,7 @@ rm -rf ./chalicelib/core/errors_favorite.py #exp rm -rf ./chalicelib/core/events.py rm -rf ./chalicelib/core/events_ios.py rm -rf ./chalicelib/core/dashboards.py -#exp rm -rf ./chalicelib/core/funnels.py +rm -rf ./chalicelib/core/funnels.py rm -rf ./chalicelib/core/integration_base.py rm -rf ./chalicelib/core/integration_base_issue.py rm -rf ./chalicelib/core/integration_github.py From 716c40fd1ffeff2e448ce7ca83c3ad0705443489 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 14:57:55 +0100 Subject: [PATCH 13/19] feat(chalice): fixed old keys support for wrong payloads --- api/schemas.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/api/schemas.py b/api/schemas.py index c19214a6a..740ecb5ba 100644 --- a/api/schemas.py +++ b/api/schemas.py @@ -551,6 +551,8 @@ class 
_SessionSearchEventRaw(__MixedSearchFilter): @root_validator(pre=True) def transform(cls, values): + if values.get("type") is None: + return values values["type"] = { "CLICK": EventType.click.value, "INPUT": EventType.input.value, @@ -634,6 +636,8 @@ class SessionSearchFilterSchema(__MixedSearchFilter): @root_validator(pre=True) def transform(cls, values): + if values.get("type") is None: + return values values["type"] = { "USEROS": FilterType.user_os.value, "USERBROWSER": FilterType.user_browser.value, From 71e67ac4088ad8fa40ccb0ee8429c514efd757bc Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 15:15:55 +0100 Subject: [PATCH 14/19] feat(DB): removed funnels table --- .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 2 +- .../db/init_dbs/postgresql/init_schema.sql | 16 ---------------- .../db/init_dbs/postgresql/1.10.0/1.10.0.sql | 1 + .../db/init_dbs/postgresql/init_schema.sql | 16 ---------------- 4 files changed, 2 insertions(+), 33 deletions(-) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index 090cc5b4a..0e9701f5f 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -329,7 +329,7 @@ DROP FUNCTION get_new_filter_key; DROP FUNCTION get_new_event_filter_key; DROP FUNCTION get_new_event_key; - +DROP TABLE IF EXISTS public.funnels; COMMIT; CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector); diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 91e6d6d7b..a16955ac9 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -109,7 +109,6 @@ $$ ('dashboards'), ('dashboard_widgets'), ('errors'), - ('funnels'), ('integrations'), ('issues'), ('jira_cloud'), @@ -336,21 +335,6 @@ $$ ); - 
CREATE TABLE IF NOT EXISTS funnels - ( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text NOT NULL, - filter jsonb NOT NULL, - created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False - ); - - CREATE INDEX IF NOT EXISTS funnels_user_id_is_public_idx ON public.funnels (user_id, is_public); - CREATE INDEX IF NOT EXISTS funnels_project_id_idx ON public.funnels (project_id); - IF NOT EXISTS(SELECT * FROM pg_type typ WHERE typ.typname = 'announcement_type') THEN diff --git a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql index d208e5ff0..9bfef752e 100644 --- a/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql +++ b/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -300,6 +300,7 @@ $$ $$ LANGUAGE plpgsql; +DROP TABLE IF EXISTS public.funnels; COMMIT; CREATE INDEX CONCURRENTLY IF NOT EXISTS clicks_selector_idx ON events.clicks (selector); diff --git a/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 9e1427f2a..89d315f15 100644 --- a/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -249,22 +249,6 @@ $$ ); - CREATE TABLE funnels - ( - funnel_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, - project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, - user_id integer NOT NULL REFERENCES users (user_id) ON DELETE CASCADE, - name text NOT NULL, - filter jsonb NOT NULL, - created_at timestamp DEFAULT timezone('utc'::text, now()) NOT NULL, - deleted_at timestamp, - is_public boolean NOT NULL DEFAULT False - ); - - CREATE INDEX funnels_user_id_is_public_idx ON public.funnels 
(user_id, is_public); - CREATE INDEX funnels_project_id_idx ON public.funnels (project_id); - - CREATE TYPE announcement_type AS ENUM ('notification', 'alert'); CREATE TABLE announcements From 7bce8f11338ba7cbb8f0114341b33fe860d0556c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 16:48:32 +0100 Subject: [PATCH 15/19] feat(chalice): fixed query after upgrade --- api/chalicelib/core/integrations_global.py | 24 +++++++++---------- ee/api/chalicelib/core/integrations_global.py | 24 +++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/api/chalicelib/core/integrations_global.py b/api/chalicelib/core/integrations_global.py index 5b00a28bd..454d2a5e0 100644 --- a/api/chalicelib/core/integrations_global.py +++ b/api/chalicelib/core/integrations_global.py @@ -9,49 +9,49 @@ def get_global_integrations_status(tenant_id, user_id, project_id): SELECT EXISTS((SELECT 1 FROM public.oauth_authentication WHERE user_id = %(user_id)s - AND provider = 'github')) AS {schemas.IntegrationType.github}, + AND provider = 'github')) AS {schemas.IntegrationType.github.value}, EXISTS((SELECT 1 FROM public.jira_cloud - WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira}, + WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag}, + AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch}, + AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='datadog')) AS {schemas.IntegrationType.datadog}, + AND provider='datadog')) AS {schemas.IntegrationType.datadog.value}, EXISTS((SELECT 1 FROM public.integrations WHERE 
project_id=%(project_id)s - AND provider='newrelic')) AS {schemas.IntegrationType.newrelic}, + AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='rollbar')) AS {schemas.IntegrationType.rollbar}, + AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sentry')) AS {schemas.IntegrationType.sentry}, + AND provider='sentry')) AS {schemas.IntegrationType.sentry.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver}, + AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sumologic')) AS {schemas.IntegrationType.sumologic}, + AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch}, + AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='slack')) AS {schemas.IntegrationType.slack};""", + WHERE type='slack')) AS {schemas.IntegrationType.slack.value};""", {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id}) ) current_integrations = cur.fetchone() diff --git a/ee/api/chalicelib/core/integrations_global.py b/ee/api/chalicelib/core/integrations_global.py index b923fc5ab..cf40aedb7 100644 --- a/ee/api/chalicelib/core/integrations_global.py +++ b/ee/api/chalicelib/core/integrations_global.py @@ -9,49 +9,49 @@ def get_global_integrations_status(tenant_id, user_id, project_id): SELECT EXISTS((SELECT 1 FROM public.oauth_authentication WHERE user_id = %(user_id)s - AND provider = 'github')) AS 
{schemas.IntegrationType.github}, + AND provider = 'github')) AS {schemas.IntegrationType.github.value}, EXISTS((SELECT 1 FROM public.jira_cloud - WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira}, + WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag}, + AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch}, + AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='datadog')) AS {schemas.IntegrationType.datadog}, + AND provider='datadog')) AS {schemas.IntegrationType.datadog.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='newrelic')) AS {schemas.IntegrationType.newrelic}, + AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='rollbar')) AS {schemas.IntegrationType.rollbar}, + AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sentry')) AS {schemas.IntegrationType.sentry}, + AND provider='sentry')) AS {schemas.IntegrationType.sentry.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver}, + AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sumologic')) AS {schemas.IntegrationType.sumologic}, + AND provider='sumologic')) AS 
{schemas.IntegrationType.sumologic.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch}, + AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='slack' AND tenant_id=%(tenant_id)s)) AS {schemas.IntegrationType.slack};""", + WHERE type='slack' AND tenant_id=%(tenant_id)s)) AS {schemas.IntegrationType.slack.value};""", {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id}) ) current_integrations = cur.fetchone() From 0519a498150fe5cb596c318cc55f44fc9cc34a7c Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 16:56:14 +0100 Subject: [PATCH 16/19] feat(chalice): fixed trail-schema --- ee/api/schemas_ee.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index ceae7cfc5..60d92817b 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -79,7 +79,15 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): user_id: Optional[int] = Field(default=None) query: Optional[str] = Field(default=None) action: Optional[str] = Field(default=None) - order: Literal["asc", "desc"] = Field(default="desc") + order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc) + + @root_validator(pre=True) + def transform_order(cls, values): + if values.get("order") is None: + values["order"] = schemas.SortOrderType.desc + else: + values["order"] = values["order"].upper() + return values class Config: alias_generator = schemas.attribute_to_camel_case From b3d9c750946215872fbee03098a2c209c0b7952f Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 17:49:30 +0100 Subject: [PATCH 17/19] feat(chalice): force SSO --- ee/api/chalicelib/core/reset_password.py | 4 ++++ ee/api/chalicelib/core/users.py | 3 +++ scripts/helmcharts/vars.yaml | 1 + 3 files changed, 8 
insertions(+) diff --git a/ee/api/chalicelib/core/reset_password.py b/ee/api/chalicelib/core/reset_password.py index 6f1af14b6..5566ad27f 100644 --- a/ee/api/chalicelib/core/reset_password.py +++ b/ee/api/chalicelib/core/reset_password.py @@ -1,3 +1,5 @@ +from decouple import config + import schemas from chalicelib.core import users from chalicelib.utils import email_helper, captcha, helper @@ -15,6 +17,8 @@ def reset(data: schemas.ForgetPasswordPayloadSchema): # ---FOR SSO if a_user.get("origin") is not None and a_user.get("hasPassword", False) is False: return {"errors": ["Please use your SSO to login"]} + if config("enforce_SSO", cast=bool, default=False) and not a_user["superAdmin"]: + return {"errors": ["Please use your SSO to login, enforced by admin"]} # ---------- invitation_link = users.generate_new_invitation(user_id=a_user["id"]) email_helper.send_forgot_password(recipient=data.email, invitation_link=invitation_link) diff --git a/ee/api/chalicelib/core/users.py b/ee/api/chalicelib/core/users.py index 423b7621c..14f7052e5 100644 --- a/ee/api/chalicelib/core/users.py +++ b/ee/api/chalicelib/core/users.py @@ -741,6 +741,9 @@ def authenticate(email, password, for_change_password=False): if for_change_password: return True r = helper.dict_to_camel_case(r) + if config("enforce_SSO", cast=bool, default=False) and not r["superAdmin"]: + return {"errors": ["must sign-in with SSO, enforced by admin"]} + jwt_iat = change_jwt_iat(r['userId']) iat = TimeUTC.datetime_to_timestamp(jwt_iat) return { diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index c24f8feb3..a44402040 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -149,6 +149,7 @@ chalice: # idp_sls_url: '' # idp_name: '' # idp_tenantKey: '' + # enforce_SSO: 'false' # If you want to override something # chartname: From a083be4ff2757c2ecf0ce7aa64f4d21def749ba6 Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 18:23:14 +0100 Subject: 
[PATCH 18/19] feat(chalice): fixed heatmap --- ee/api/routers/core_dynamic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py index da120ba39..3e49f2320 100644 --- a/ee/api/routers/core_dynamic.py +++ b/ee/api/routers/core_dynamic.py @@ -307,7 +307,7 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], @app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatmapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): - return {"data": heatmaps.get_by_url(project_id=projectId, data=data.dict())} + return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], From 7e34bec76f770b70f35df45e3dc0166fb144161e Mon Sep 17 00:00:00 2001 From: Taha Yassine Kraiem Date: Mon, 23 Jan 2023 18:27:22 +0100 Subject: [PATCH 19/19] feat(chalice): fixed queries after upgrade --- api/chalicelib/core/sessions.py | 4 ++-- ee/api/chalicelib/core/sessions.py | 10 +++++----- ee/api/chalicelib/core/sessions_exp.py | 6 +++--- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index 2044b0353..651f18e96 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -799,7 +799,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) elif event_type == 
schemas.PerformanceEventType.time_between_events: event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN {getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " @@ -837,7 +837,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, e_k += "_custom" full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s", + sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) elif event_type == schemas.EventType.request_details: diff --git a/ee/api/chalicelib/core/sessions.py b/ee/api/chalicelib/core/sessions.py index 67877b455..8187f9c5f 100644 --- a/ee/api/chalicelib/core/sessions.py +++ b/ee/api/chalicelib/core/sessions.py @@ -206,9 +206,9 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ ORDER BY s.session_id desc) AS filtred_sessions ORDER BY {sort} {data.order}, issue_score DESC) AS full_sessions;""", full_args) - # print("--------------------") - # print(main_query) - # print("--------------------") + print("--------------------") + print(main_query) + print("--------------------") try: cur.execute(main_query) except Exception as err: @@ -801,7 +801,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + - sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) elif event_type == schemas.PerformanceEventType.time_between_events: event_from = event_from % f"{getattr(events.EventType, event.value[0].type).table} AS main INNER JOIN 
{getattr(events.EventType, event.value[1].type).table} AS main2 USING(session_id) " @@ -839,7 +839,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, e_k += "_custom" full_args = {**full_args, **sh.multi_values(event.source, value_key=e_k)} event_where.append( - sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s", + sh.multi_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) elif event_type == schemas.EventType.request_details: diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index e0ca6e971..1f957380e 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -963,7 +963,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)} event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " + - _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + _multiple_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) @@ -986,7 +986,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu full_args = {**full_args, **_multiple_values(event.source, value_key=e_k)} event_where.append(f"isNotNull({tname}.{colname}) AND {tname}.{colname}>0 AND " + - _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", + _multiple_conditions(f"{tname}.{colname} {event.sourceOperator.value} %({e_k})s", event.source, value_key=e_k)) events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) @@ -1038,7 +1038,7 @@ 
def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu # _multiple_conditions(f"main2.timestamp - main.timestamp {event.sourceOperator} %({e_k})s", # event.source, value_key=e_k)) # events_conditions[-2]["time"] = f"(?t{event.sourceOperator} %({e_k})s)" - events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator}%({e_k})s", event.source, + events_conditions[-2]["time"] = _multiple_conditions(f"?t{event.sourceOperator.value}%({e_k})s", event.source, value_key=e_k) event_index += 1 # TODO: no isNot for RequestDetails