diff --git a/api/chalicelib/core/sourcemaps.py b/api/chalicelib/core/sourcemaps.py index 886198e35..555b5b057 100644 --- a/api/chalicelib/core/sourcemaps.py +++ b/api/chalicelib/core/sourcemaps.py @@ -1,4 +1,3 @@ -import hashlib from urllib.parse import urlparse import requests @@ -8,17 +7,11 @@ from chalicelib.core import sourcemaps_parser from chalicelib.utils import s3 -def __get_key(project_id, url): - u = urlparse(url) - new_url = u.scheme + "://" + u.netloc + u.path - return f"{project_id}/{hashlib.md5(new_url.encode()).hexdigest()}" - - def presign_share_urls(project_id, urls): results = [] for u in urls: results.append(s3.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120, - key=__get_key(project_id, u), + key=s3.generate_file_key_from_url(project_id, u), check_exists=True)) return results @@ -28,7 +21,7 @@ def presign_upload_urls(project_id, urls): for u in urls: results.append(s3.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'), expires_in=1800, - key=__get_key(project_id, u))) + key=s3.generate_file_key_from_url(project_id, u))) return results @@ -88,7 +81,7 @@ def get_traces_group(project_id, payload): file_exists_in_bucket = False file_exists_in_server = False file_url = u["absPath"] - key = __get_key(project_id, file_url) # use filename instead? + key = s3.generate_file_key_from_url(project_id, file_url) # use filename instead? 
params_idx = file_url.find("?") if file_url and len(file_url) > 0 \ and not (file_url[:params_idx] if params_idx > -1 else file_url).endswith(".js"): @@ -180,7 +173,7 @@ def fetch_missed_contexts(frames): line = lines[l] offset = c - MAX_COLUMN_OFFSET - if offset < 0: # if the line is shirt + if offset < 0: # if the line is short offset = 0 frames[i]["context"].append([frames[i]["lineNo"], line[offset: c + MAX_COLUMN_OFFSET + 1]]) return frames diff --git a/api/chalicelib/utils/s3.py b/api/chalicelib/utils/s3.py index 5458c8f14..4c6c02b62 100644 --- a/api/chalicelib/utils/s3.py +++ b/api/chalicelib/utils/s3.py @@ -1,3 +1,6 @@ +import hashlib +from urllib.parse import urlparse + from botocore.exceptions import ClientError from decouple import config from datetime import datetime, timedelta @@ -88,3 +91,13 @@ def schedule_for_deletion(bucket, key): s3_object.copy_from(CopySource={'Bucket': bucket, 'Key': key}, Expires=datetime.now() + timedelta(days=7), MetadataDirective='REPLACE') + + +def generate_file_key(project_id, key): + return f"{project_id}/{hashlib.md5(key.encode()).hexdigest()}" + + +def generate_file_key_from_url(project_id, url): + u = urlparse(url) + new_url = u.scheme + "://" + u.netloc + u.path + return generate_file_key(project_id=project_id, key=new_url) diff --git a/ee/api/chalicelib/core/assist_records.py b/ee/api/chalicelib/core/assist_records.py new file mode 100644 index 000000000..4c39bbe83 --- /dev/null +++ b/ee/api/chalicelib/core/assist_records.py @@ -0,0 +1,28 @@ +from decouple import config + +import schemas_ee +from chalicelib.utils import s3, pg_client +from chalicelib.utils.TimeUTC import TimeUTC + + +def presign_records(project_id, data: schemas_ee.AssistRecordUploadPayloadSchema, context: schemas_ee.CurrentContext): + results = [] + params = {"user_id": context.user_id, "project_id": project_id} + + for i, r in enumerate(data.records): + key = f"{TimeUTC.now() + i}-{r.name}" + 
results.append(s3.get_presigned_url_for_upload(bucket=config('ASSIST_RECORDS_BUCKET'), + expires_in=1800, + key=s3.generate_file_key(project_id=project_id, key=key))) + params[f"name_{i}"] = r.name + params[f"duration_{i}"] = r.duration + params[f"key_{i}"] = key + if not data.records:  # guard: empty records would render "INSERT ... VALUES " (invalid SQL) + return results + with pg_client.PostgresClient() as cur: + values = [f"(%(project_id)s, %(user_id)s, %(name_{i})s, %(key_{i})s, %(duration_{i})s)" for i in + range(len(data.records))] + query = cur.mogrify(f"""INSERT INTO assist_records(project_id, user_id, name, file_key, duration) + VALUES {",".join(values)}""", params) + cur.execute(query) + return results diff --git a/ee/api/env.default b/ee/api/env.default index e707bec57..f980c9763 100644 --- a/ee/api/env.default +++ b/ee/api/env.default @@ -45,6 +45,7 @@ PG_MAXCONN=50 PG_RETRY_MAX=50 PG_RETRY_INTERVAL=2 PG_POOL=true +ASSIST_RECORDS_BUCKET=mobs sessions_bucket=mobs sessions_region=us-east-1 sourcemaps_bucket=sourcemaps diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index dc08ac569..915c016df 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,4 +1,4 @@ -from chalicelib.core import roles, traces +from chalicelib.core import roles, traces, assist_records from chalicelib.core import unlock from chalicelib.utils import assist_helper @@ -6,7 +8,6 @@ unlock.check() from or_dependencies import OR_context from routers.base import get_routers -import schemas import schemas_ee from fastapi import Depends, Body @@ -14,7 +15,7 @@ public_app, app, app_apikey = get_routers() @app.get('/client/roles', tags=["client", "roles"]) -def get_roles(context: schemas.CurrentContext = Depends(OR_context)): +def get_roles(context: schemas_ee.CurrentContext = Depends(OR_context)): return { 'data': roles.get_roles(tenant_id=context.tenant_id) } @@ -22,7 +23,7 @@ def get_roles(context: schemas.CurrentContext = Depends(OR_context)): @app.post('/client/roles', tags=["client", "roles"]) @app.put('/client/roles',
tags=["client", "roles"]) -def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): +def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas_ee.CurrentContext = Depends(OR_context)): data = roles.create(tenant_id=context.tenant_id, user_id=context.user_id, data=data) if "errors" in data: return data @@ -35,7 +36,7 @@ def add_role(data: schemas_ee.RolePayloadSchema = Body(...), context: schemas.Cu @app.post('/client/roles/{roleId}', tags=["client", "roles"]) @app.put('/client/roles/{roleId}', tags=["client", "roles"]) def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): data = roles.update(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId, data=data) if "errors" in data: return data @@ -46,7 +47,7 @@ def edit_role(roleId: int, data: schemas_ee.RolePayloadSchema = Body(...), @app.delete('/client/roles/{roleId}', tags=["client", "roles"]) -def delete_role(roleId: int, context: schemas.CurrentContext = Depends(OR_context)): +def delete_role(roleId: int, context: schemas_ee.CurrentContext = Depends(OR_context)): data = roles.delete(tenant_id=context.tenant_id, user_id=context.user_id, role_id=roleId) if "errors" in data: return data @@ -62,12 +63,19 @@ def get_assist_credentials(): @app.post('/trails', tags=["traces", "trails"]) def get_trails(data: schemas_ee.TrailSearchPayloadSchema = Body(...), - context: schemas.CurrentContext = Depends(OR_context)): + context: schemas_ee.CurrentContext = Depends(OR_context)): return { 'data': traces.get_all(tenant_id=context.tenant_id, data=data) } @app.post('/trails/actions', tags=["traces", "trails"]) -def get_available_trail_actions(context: schemas.CurrentContext = Depends(OR_context)): +def get_available_trail_actions(context: schemas_ee.CurrentContext = Depends(OR_context)): return 
{'data': traces.get_available_actions(tenant_id=context.tenant_id)} + + +@app.put('/{projectId}/assist/save/', tags=["assist"]) +@app.put('/{projectId}/assist/save', tags=["assist"]) +def sign_record_for_upload(projectId: int, data: schemas_ee.AssistRecordUploadPayloadSchema = Body(...), + context: schemas_ee.CurrentContext = Depends(OR_context)): + return {"data": assist_records.presign_records(project_id=projectId, data=data, context=context)} diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index 9690eb334..efd88f580 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -81,3 +81,12 @@ class SessionModel(BaseModel): userDeviceType: str userAnonymousId: Optional[str] metadata: dict = Field(default={}) + + +class AssistRecord(BaseModel): + name: str = Field(...) + duration: int = Field(...) + + +class AssistRecordUploadPayloadSchema(BaseModel): + records: List[AssistRecord] = Field(default=[]) diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.9.5/1.9.5.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.9.5/1.9.5.sql new file mode 100644 index 000000000..245b2f85d --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.9.5/1.9.5.sql @@ -0,0 +1,20 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.9.5-ee' +$$ LANGUAGE sql IMMUTABLE; + +CREATE TABLE IF NOT EXISTS assist_records +( + record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + name text NOT NULL, + file_key text NOT NULL, + duration integer NOT NULL +); + +COMMIT; \ No newline at end of file diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index 
f486c731e..16353f4fd 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -131,7 +131,8 @@ $$ ('user_viewed_sessions'), ('users'), ('webhooks'), - ('sessions_notes')) + ('sessions_notes'), + ('assist_records')) select bool_and(exists(select * from information_schema.tables t where table_schema = 'public' @@ -1245,7 +1246,17 @@ $$ CREATE INDEX IF NOT EXISTS requests_query_nn_idx ON events_common.requests (query) WHERE query IS NOT NULL; CREATE INDEX IF NOT EXISTS requests_query_nn_gin_idx ON events_common.requests USING GIN (query gin_trgm_ops) WHERE query IS NOT NULL; - + CREATE TABLE IF NOT EXISTS assist_records + ( + record_id integer generated BY DEFAULT AS IDENTITY PRIMARY KEY, + project_id integer NOT NULL REFERENCES projects (project_id) ON DELETE CASCADE, + user_id integer NULL REFERENCES users (user_id) ON DELETE SET NULL, + created_at timestamp without time zone NOT NULL default (now() at time zone 'utc'), + deleted_at timestamp without time zone NULL DEFAULT NULL, + name text NOT NULL, + file_key text NOT NULL, + duration integer NOT NULL + ); END IF; END; $$