diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index 9eb23910a..a6a3998cb 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -67,7 +67,7 @@ jobs: } && { echo "Skipping Security Checks" } - PUSH_IMAGE=1 bash -x ./build.sh ee + docker push $DOCKER_REPO/$image:$IMAGE_TAG - name: Creating old image input run: | # diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index e4d85ff24..e85775ed2 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -66,7 +66,7 @@ jobs: } && { echo "Skipping Security Checks" } - PUSH_IMAGE=1 bash -x ./build.sh + docker push $DOCKER_REPO/$image:$IMAGE_TAG - name: Creating old image input run: | # diff --git a/.github/workflows/frontend-dev.yaml b/.github/workflows/frontend-dev.yaml new file mode 100644 index 000000000..fa7137d1c --- /dev/null +++ b/.github/workflows/frontend-dev.yaml @@ -0,0 +1,81 @@ +name: Frontend Dev Deployment +on: workflow_dispatch +# Disable previous workflows for this action. +concurrency: + group: ${{ github.workflow }} #-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Cache node modules + uses: actions/cache@v1 + with: + path: node_modules + key: ${{ runner.OS }}-build-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.OS }}-build- + ${{ runner.OS }}- + + - name: Docker login + run: | + docker login ${{ secrets.OSS_REGISTRY_URL }} -u ${{ secrets.OSS_DOCKER_USERNAME }} -p "${{ secrets.OSS_REGISTRY_TOKEN }}" + + - uses: azure/k8s-set-context@v1 + with: + method: kubeconfig + kubeconfig: ${{ secrets.DEV_KUBECONFIG }} # Use content of kubeconfig in secret. 
+ id: setcontext + + - name: Building and Pushing frontend image + id: build-image + env: + DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} + IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} + ENVIRONMENT: staging + run: | + set -x + cd frontend + mv .env.sample .env + docker run --rm -v /etc/passwd:/etc/passwd -u `id -u`:`id -g` -v $(pwd):/home/${USER} -w /home/${USER} --name node_build node:14-stretch-slim /bin/bash -c "yarn && yarn build" + # https://github.com/docker/cli/issues/1134#issuecomment-613516912 + DOCKER_BUILDKIT=1 docker build --target=cicd -t $DOCKER_REPO/frontend:${IMAGE_TAG} . + docker tag $DOCKER_REPO/frontend:${IMAGE_TAG} $DOCKER_REPO/frontend:${IMAGE_TAG}-ee + docker push $DOCKER_REPO/frontend:${IMAGE_TAG} + docker push $DOCKER_REPO/frontend:${IMAGE_TAG}-ee + + - name: Deploy to kubernetes foss + run: | + cd scripts/helmcharts/ + + set -x + cat <>/tmp/image_override.yaml + frontend: + image: + tag: ${IMAGE_TAG} + EOF + + ## Update secerts + sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml + sed -i "s/postgresqlPassword: \"changeMePassword\"/postgresqlPassword: \"${{ secrets.DEV_PG_PASSWORD }}\"/g" vars.yaml + sed -i "s/accessKey: \"changeMeMinioAccessKey\"/accessKey: \"${{ secrets.DEV_MINIO_ACCESS_KEY }}\"/g" vars.yaml + sed -i "s/secretKey: \"changeMeMinioPassword\"/secretKey: \"${{ secrets.DEV_MINIO_SECRET_KEY }}\"/g" vars.yaml + sed -i "s/jwt_secret: \"SetARandomStringHere\"/jwt_secret: \"${{ secrets.DEV_JWT_SECRET }}\"/g" vars.yaml + sed -i "s/domainName: \"\"/domainName: \"${{ secrets.DEV_DOMAIN_NAME }}\"/g" vars.yaml + + # Update changed image tag + sed -i "/frontend/{n;n;s/.*/ tag: ${IMAGE_TAG}/}" /tmp/image_override.yaml + + cat /tmp/image_override.yaml + # Deploy command + mv openreplay/charts/{ingress-nginx,frontend,quickwit} /tmp + rm -rf openreplay/charts/* + mv /tmp/{ingress-nginx,frontend,quickwit} openreplay/charts/ + helm template openreplay -n app openreplay 
-f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - + env: + DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} + IMAGE_TAG: ${{ github.ref_name }}_${{ github.sha }} diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml index 2f2fd3989..db90235ee 100644 --- a/.github/workflows/frontend.yaml +++ b/.github/workflows/frontend.yaml @@ -1,4 +1,4 @@ -name: Frontend FOSS Deployment +name: Frontend Foss Deployment on: workflow_dispatch: push: diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index c573dff26..b75d22093 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt @@ -95,6 +93,7 @@ jobs: # Pushing image to registry # cd backend + cat /tmp/images_to_build.txt for image in $(cat /tmp/images_to_build.txt); do echo "Bulding $image" @@ -109,7 +108,7 @@ jobs: } && { echo "Skipping Security Checks" } - PUSH_IMAGE=1 bash -x ./build.sh ee $image + docker push $DOCKER_REPO/$image:$IMAGE_TAG echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG" done @@ -156,22 +155,19 @@ jobs: mv /tmp/helmcharts/* openreplay/charts/ ls openreplay/charts - cat /tmp/image_override.yaml - # Deploy command helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f - - #- name: Alert slack - # if: ${{ 
failure() }} - # uses: rtCamp/action-slack-notify@v2 - # env: - # SLACK_CHANNEL: ee - # SLACK_TITLE: "Failed ${{ github.workflow }}" - # SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' - # SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} - # SLACK_USERNAME: "OR Bot" - # SLACK_MESSAGE: 'Build failed :bomb:' - + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ee + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' # - name: Debug Job # if: ${{ failure() }} diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index e222e00fb..0d9927df9 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt @@ -95,6 +93,7 @@ jobs: # Pushing image to registry # cd backend + cat /tmp/images_to_build.txt for image in $(cat /tmp/images_to_build.txt); do echo "Bulding $image" @@ -109,7 +108,7 @@ jobs: } && { echo "Skipping Security Checks" } - PUSH_IMAGE=1 bash -x ./build.sh skip $image + docker push $DOCKER_REPO/$image:$IMAGE_TAG echo "::set-output name=image::$DOCKER_REPO/$image:$IMAGE_TAG" done @@ -154,8 +153,6 @@ jobs: mv /tmp/helmcharts/* openreplay/charts/ ls openreplay/charts - cat /tmp/image_override.yaml - # Deploy command helm template openreplay -n 
app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f - diff --git a/.gitignore b/.gitignore index ee79ca544..d509c8e8c 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ public node_modules *DS_Store *.env -.idea \ No newline at end of file +**/*.envrc +.idea diff --git a/api/Dockerfile b/api/Dockerfile index 036dcb0f2..dec3156cb 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg # Add Tini @@ -9,7 +12,8 @@ ENV SOURCE_MAP_VERSION=0.7.4 \ APP_NAME=chalice \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=false \ - ENTERPRISE_BUILD=${envarg} + ENTERPRISE_BUILD=${envarg} \ + GIT_SHA=$GIT_SHA WORKDIR /work COPY requirements.txt ./requirements.txt diff --git a/api/Dockerfile_alerts b/api/Dockerfile_alerts index 881b21fb9..117e28456 100644 --- a/api/Dockerfile_alerts +++ b/api/Dockerfile_alerts @@ -1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg ENV APP_NAME=alerts \ @@ -8,6 +11,7 @@ ENV APP_NAME=alerts \ PG_MAXCONN=10 \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=true \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} WORKDIR /work diff --git a/api/build.sh b/api/build.sh index 895f9bb8e..c5610e233 100644 --- a/api/build.sh +++ b/api/build.sh @@ -16,7 +16,8 @@ exit_err() { } environment=$1 -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -41,12 +42,12 @@ function build_api(){ tag="ee-" } mv Dockerfile.dockerignore .dockerignore - docker build -f ./Dockerfile --build-arg envarg=$envarg -t 
${DOCKER_REPO:-'local'}/chalice:${git_sha1} . + docker build -f ./Dockerfile --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/chalice:${image_tag} . cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest + docker push ${DOCKER_REPO:-'local'}/chalice:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/chalice:${image_tag} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest } echo "api docker build completed" diff --git a/api/build_alerts.sh b/api/build_alerts.sh index a36472a8d..81c4cdb36 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -7,7 +7,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -31,12 +32,12 @@ function build_alerts(){ tag="ee-" } mv Dockerfile_alerts.dockerignore .dockerignore - docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/alerts:${git_sha1} . + docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/alerts:${image_tag} . 
cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/alerts:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest + docker push ${DOCKER_REPO:-'local'}/alerts:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/alerts:${image_tag} ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest } echo "completed alerts build" diff --git a/backend/Dockerfile b/backend/Dockerfile index 0d7cad075..043de51cd 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -19,10 +19,14 @@ RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic open FROM alpine AS entrypoint +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5 RUN adduser -u 1001 openreplay -D ENV TZ=UTC \ + GIT_SHA=$GIT_SHA \ FS_ULIMIT=1000 \ FS_DIR=/mnt/efs \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ @@ -69,9 +73,10 @@ ENV TZ=UTC \ PARTITIONS_NUMBER=16 \ QUEUE_MESSAGE_SIZE_LIMIT=1048576 \ BEACON_SIZE_LIMIT=1000000 \ - USE_FAILOVER=true \ + USE_FAILOVER=false \ GROUP_STORAGE_FAILOVER=failover \ - TOPIC_STORAGE_FAILOVER=storage-failover + TOPIC_STORAGE_FAILOVER=storage-failover \ + PROFILER_ENABLED=false diff --git a/backend/build.sh b/backend/build.sh index 073f540df..6ff4cd4ef 100755 --- a/backend/build.sh +++ b/backend/build.sh @@ -9,7 +9,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh set -e -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} ee="false" check_prereq() { which docker || { @@ -22,9 +23,9 @@ check_prereq() { function build_service() { image="$1" echo "BUILDING $image" - docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image . 
+ docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/amd64 --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha . [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1} + docker push ${DOCKER_REPO:-'local'}/$image:${image_tag} } echo "Build completed for $image" return @@ -51,7 +52,7 @@ function build_api(){ for image in $(ls cmd); do build_service $image - echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}" + echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}" done cd ../backend rm -rf ../${destination} diff --git a/backend/cmd/assets/main.go b/backend/cmd/assets/main.go index 5fdc85107..b41dedd87 100644 --- a/backend/cmd/assets/main.go +++ b/backend/cmd/assets/main.go @@ -3,6 +3,7 @@ package main import ( "context" "log" + "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -18,10 +19,12 @@ import ( func main() { metrics := monitoring.New("assets") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } cacher := cacher.NewCacher(cfg, metrics) diff --git a/backend/cmd/db/main.go b/backend/cmd/db/main.go index 8db029394..74e6149bc 100644 --- a/backend/cmd/db/main.go +++ b/backend/cmd/db/main.go @@ -4,6 +4,7 @@ import ( "errors" "log" types2 "openreplay/backend/pkg/db/types" + "openreplay/backend/pkg/pprof" "openreplay/backend/pkg/queue/types" "os" "os/signal" @@ -16,7 +17,6 @@ import ( "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/handlers" custom2 "openreplay/backend/pkg/handlers/custom" - logger "openreplay/backend/pkg/log" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "openreplay/backend/pkg/queue" @@ -25,10 +25,12 @@ import ( func main() { metrics := monitoring.New("db") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := db.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } // Init database pg := 
cache.NewPGCache( @@ -56,20 +58,17 @@ func main() { // Init modules saver := datasaver.New(pg, producer) saver.InitStats() - statsLogger := logger.NewQueueStats(cfg.LoggerTimeout) msgFilter := []int{messages.MsgMetadata, messages.MsgIssueEvent, messages.MsgSessionStart, messages.MsgSessionEnd, messages.MsgUserID, messages.MsgUserAnonymousID, messages.MsgClickEvent, messages.MsgIntegrationEvent, messages.MsgPerformanceTrackAggr, messages.MsgJSException, messages.MsgResourceTiming, - messages.MsgRawCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgGraphQL, + messages.MsgCustomEvent, messages.MsgCustomIssue, messages.MsgFetch, messages.MsgNetworkRequest, messages.MsgGraphQL, messages.MsgStateAction, messages.MsgSetInputTarget, messages.MsgSetInputValue, messages.MsgCreateDocument, messages.MsgMouseClick, messages.MsgSetPageLocation, messages.MsgPageLoadTiming, messages.MsgPageRenderTiming} // Handler logic msgHandler := func(msg messages.Message) { - statsLogger.Collect(msg) - // Just save session data into db without additional checks if err := saver.InsertMessage(msg); err != nil { if !postgres.IsPkeyViolation(err) { diff --git a/backend/cmd/ender/main.go b/backend/cmd/ender/main.go index beb69bd42..be582c2fd 100644 --- a/backend/cmd/ender/main.go +++ b/backend/cmd/ender/main.go @@ -3,8 +3,10 @@ package main import ( "log" "openreplay/backend/internal/storage" + "openreplay/backend/pkg/pprof" "os" "os/signal" + "strings" "syscall" "time" @@ -13,21 +15,24 @@ import ( "openreplay/backend/pkg/db/cache" "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/intervals" - logger "openreplay/backend/pkg/log" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "openreplay/backend/pkg/queue" ) func main() { - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) metrics := monitoring.New("ender") + log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) + cfg := ender.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } 
pg := cache.NewPGCache(postgres.NewConn(cfg.Postgres, 0, 0, metrics), cfg.ProjectExpirationTimeoutMs) defer pg.Close() - sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber, logger.NewQueueStats(cfg.LoggerTimeout)) + sessions, err := sessionender.New(metrics, intervals.EVENTS_SESSION_END_TIMEOUT, cfg.PartitionsNumber) if err != nil { log.Printf("can't init ender service: %s", err) return @@ -62,6 +67,9 @@ func main() { consumer.Close() os.Exit(0) case <-tick: + failedSessionEnds := make(map[uint64]int64) + duplicatedSessionEnds := make(map[uint64]uint64) + // Find ended sessions and send notification to other services sessions.HandleEndedSessions(func(sessionID uint64, timestamp int64) bool { msg := &messages.SessionEnd{Timestamp: uint64(timestamp)} @@ -71,12 +79,17 @@ func main() { } newDuration, err := pg.InsertSessionEnd(sessionID, msg.Timestamp) if err != nil { + if strings.Contains(err.Error(), "integer out of range") { + // Skip session with broken duration + failedSessionEnds[sessionID] = timestamp + return true + } log.Printf("can't save sessionEnd to database, sessID: %d, err: %s", sessionID, err) return false } if currDuration == newDuration { - log.Printf("sessionEnd duplicate, sessID: %d, prevDur: %d, newDur: %d", sessionID, - currDuration, newDuration) + // Skip session end duplicate + duplicatedSessionEnds[sessionID] = currDuration return true } if cfg.UseEncryption { @@ -94,6 +107,12 @@ func main() { } return true }) + if len(failedSessionEnds) > 0 { + log.Println("sessions with wrong duration:", failedSessionEnds) + } + if len(duplicatedSessionEnds) > 0 { + log.Println("session end duplicates:", duplicatedSessionEnds) + } producer.Flush(cfg.ProducerTimeout) if err := consumer.CommitBack(intervals.EVENTS_BACK_COMMIT_GAP); err != nil { log.Printf("can't commit messages with offset: %s", err) diff --git a/backend/cmd/heuristics/main.go b/backend/cmd/heuristics/main.go index 0a5c77ea5..ac55b83bc 100644 
--- a/backend/cmd/heuristics/main.go +++ b/backend/cmd/heuristics/main.go @@ -2,6 +2,7 @@ package main import ( "log" + "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -11,7 +12,6 @@ import ( "openreplay/backend/pkg/handlers" web2 "openreplay/backend/pkg/handlers/web" "openreplay/backend/pkg/intervals" - logger "openreplay/backend/pkg/log" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/queue" "openreplay/backend/pkg/sessions" @@ -20,8 +20,10 @@ import ( func main() { log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) - // Load service configuration cfg := heuristics.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } // HandlersFabric returns the list of message handlers we want to be applied to each incoming message. handlersFabric := func() []handlers.MessageProcessor { @@ -41,14 +43,10 @@ func main() { // Create handler's aggregator builderMap := sessions.NewBuilderMap(handlersFabric) - // Init logger - statsLogger := logger.NewQueueStats(cfg.LoggerTimeout) - // Init producer and consumer for data bus producer := queue.NewProducer(cfg.MessageSizeLimit, true) msgHandler := func(msg messages.Message) { - statsLogger.Collect(msg) builderMap.HandleMessage(msg) } diff --git a/backend/cmd/http/main.go b/backend/cmd/http/main.go index 7012917e4..72dfab5ed 100644 --- a/backend/cmd/http/main.go +++ b/backend/cmd/http/main.go @@ -7,6 +7,7 @@ import ( "openreplay/backend/internal/http/server" "openreplay/backend/internal/http/services" "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -18,10 +19,12 @@ import ( func main() { metrics := monitoring.New("http") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := http.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } // Connect to queue producer := queue.NewProducer(cfg.MessageSizeLimit, true) diff --git a/backend/cmd/integrations/main.go b/backend/cmd/integrations/main.go index 4f5a30dcf..59d12b0c7 100644 --- 
a/backend/cmd/integrations/main.go +++ b/backend/cmd/integrations/main.go @@ -5,6 +5,7 @@ import ( config "openreplay/backend/internal/config/integrations" "openreplay/backend/internal/integrations/clientManager" "openreplay/backend/pkg/monitoring" + "openreplay/backend/pkg/pprof" "time" "os" @@ -19,10 +20,12 @@ import ( func main() { metrics := monitoring.New("integrations") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } pg := postgres.NewConn(cfg.PostgresURI, 0, 0, metrics) defer pg.Close() diff --git a/backend/cmd/sink/main.go b/backend/cmd/sink/main.go index 03f11b200..0999e3936 100644 --- a/backend/cmd/sink/main.go +++ b/backend/cmd/sink/main.go @@ -3,6 +3,7 @@ package main import ( "context" "log" + "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -20,10 +21,12 @@ import ( func main() { metrics := monitoring.New("sink") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := sink.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } if _, err := os.Stat(cfg.FsDir); os.IsNotExist(err) { log.Fatalf("%v doesn't exist. 
%v", cfg.FsDir, err) diff --git a/backend/cmd/storage/main.go b/backend/cmd/storage/main.go index 251ce82e2..dcb1b53ed 100644 --- a/backend/cmd/storage/main.go +++ b/backend/cmd/storage/main.go @@ -2,6 +2,7 @@ package main import ( "log" + "openreplay/backend/pkg/pprof" "os" "os/signal" "syscall" @@ -18,10 +19,12 @@ import ( func main() { metrics := monitoring.New("storage") - log.SetFlags(log.LstdFlags | log.LUTC | log.Llongfile) cfg := config.New() + if cfg.UseProfiler { + pprof.StartProfilingServer() + } s3 := s3storage.NewS3(cfg.S3Region, cfg.S3Bucket) srv, err := storage.New(cfg, s3, metrics) @@ -44,8 +47,8 @@ func main() { messages.NewMessageIterator( func(msg messages.Message) { sesEnd := msg.(*messages.SessionEnd) - if err := srv.UploadSessionFiles(sesEnd); err != nil { - log.Printf("can't find session: %d", msg.SessionID()) + if err := srv.Upload(sesEnd); err != nil { + log.Printf("upload session err: %s, sessID: %d", err, msg.SessionID()) sessionFinder.Find(msg.SessionID(), sesEnd.Timestamp) } // Log timestamp of last processed session @@ -54,7 +57,7 @@ func main() { []int{messages.MsgSessionEnd}, true, ), - true, + false, cfg.MessageSizeLimit, ) @@ -69,10 +72,15 @@ func main() { case sig := <-sigchan: log.Printf("Caught signal %v: terminating\n", sig) sessionFinder.Stop() + srv.Wait() consumer.Close() os.Exit(0) case <-counterTick: go counter.Print() + srv.Wait() + if err := consumer.Commit(); err != nil { + log.Printf("can't commit messages: %s", err) + } case msg := <-consumer.Rebalanced(): log.Println(msg) default: diff --git a/backend/internal/config/assets/config.go b/backend/internal/config/assets/config.go index 1dfc8a4a8..399ee84f4 100644 --- a/backend/internal/config/assets/config.go +++ b/backend/internal/config/assets/config.go @@ -14,6 +14,7 @@ type Config struct { AssetsOrigin string `env:"ASSETS_ORIGIN,required"` AssetsSizeLimit int `env:"ASSETS_SIZE_LIMIT,required"` AssetsRequestHeaders map[string]string `env:"ASSETS_REQUEST_HEADERS"` + 
UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/db/config.go b/backend/internal/config/db/config.go index 715d9ff8e..6ec25ab01 100644 --- a/backend/internal/config/db/config.go +++ b/backend/internal/config/db/config.go @@ -18,6 +18,7 @@ type Config struct { BatchQueueLimit int `env:"DB_BATCH_QUEUE_LIMIT,required"` BatchSizeLimit int `env:"DB_BATCH_SIZE_LIMIT,required"` UseQuickwit bool `env:"QUICKWIT_ENABLED,default=false"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/ender/config.go b/backend/internal/config/ender/config.go index fb315acbe..cc963285f 100644 --- a/backend/internal/config/ender/config.go +++ b/backend/internal/config/ender/config.go @@ -15,6 +15,7 @@ type Config struct { ProducerTimeout int `env:"PRODUCER_TIMEOUT,default=2000"` PartitionsNumber int `env:"PARTITIONS_NUMBER,required"` UseEncryption bool `env:"USE_ENCRYPTION,default=false"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/heuristics/config.go b/backend/internal/config/heuristics/config.go index fbe0eab81..6552944a3 100644 --- a/backend/internal/config/heuristics/config.go +++ b/backend/internal/config/heuristics/config.go @@ -13,6 +13,7 @@ type Config struct { TopicRawWeb string `env:"TOPIC_RAW_WEB,required"` TopicRawIOS string `env:"TOPIC_RAW_IOS,required"` ProducerTimeout int `env:"PRODUCER_TIMEOUT,default=2000"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/http/config.go b/backend/internal/config/http/config.go index 3c30d3980..b8bd6cfbb 100644 --- a/backend/internal/config/http/config.go +++ b/backend/internal/config/http/config.go @@ -23,6 +23,7 @@ type Config struct { TokenSecret string `env:"TOKEN_SECRET,required"` UAParserFile string `env:"UAPARSER_FILE,required"` MaxMinDBFile string 
`env:"MAXMINDDB_FILE,required"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` WorkerID uint16 } diff --git a/backend/internal/config/integrations/config.go b/backend/internal/config/integrations/config.go index c61377b8c..4713fd6b0 100644 --- a/backend/internal/config/integrations/config.go +++ b/backend/internal/config/integrations/config.go @@ -10,6 +10,7 @@ type Config struct { TopicAnalytics string `env:"TOPIC_ANALYTICS,required"` PostgresURI string `env:"POSTGRES_STRING,required"` TokenSecret string `env:"TOKEN_SECRET,required"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/sink/config.go b/backend/internal/config/sink/config.go index 53e3517a4..802dfb54b 100644 --- a/backend/internal/config/sink/config.go +++ b/backend/internal/config/sink/config.go @@ -21,6 +21,8 @@ type Config struct { ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"` CacheThreshold int64 `env:"CACHE_THRESHOLD,default=5"` CacheExpiration int64 `env:"CACHE_EXPIRATION,default=120"` + CacheBlackList string `env:"CACHE_BLACK_LIST,default="` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/config/storage/config.go b/backend/internal/config/storage/config.go index 6083f0249..63c595f62 100644 --- a/backend/internal/config/storage/config.go +++ b/backend/internal/config/storage/config.go @@ -21,6 +21,7 @@ type Config struct { ProducerCloseTimeout int `env:"PRODUCER_CLOSE_TIMEOUT,default=15000"` UseFailover bool `env:"USE_FAILOVER,default=false"` MaxFileSize int64 `env:"MAX_FILE_SIZE,default=524288000"` + UseProfiler bool `env:"PROFILER_ENABLED,default=false"` } func New() *Config { diff --git a/backend/internal/db/datasaver/fts.go b/backend/internal/db/datasaver/fts.go index c0250c4d2..6b6bdbbae 100644 --- a/backend/internal/db/datasaver/fts.go +++ b/backend/internal/db/datasaver/fts.go @@ -6,7 +6,7 @@ import ( 
"openreplay/backend/pkg/messages" ) -type FetchEventFTS struct { +type NetworkRequestFTS struct { Method string `json:"method"` URL string `json:"url"` Request string `json:"request"` @@ -36,7 +36,7 @@ type PageEventFTS struct { TimeToInteractive uint64 `json:"time_to_interactive"` } -type GraphQLEventFTS struct { +type GraphQLFTS struct { OperationKind string `json:"operation_kind"` OperationName string `json:"operation_name"` Variables string `json:"variables"` @@ -56,18 +56,8 @@ func (s *Saver) sendToFTS(msg messages.Message, sessionID uint64) { switch m := msg.(type) { // Common - case *messages.Fetch: - event, err = json.Marshal(FetchEventFTS{ - Method: m.Method, - URL: m.URL, - Request: m.Request, - Response: m.Response, - Status: m.Status, - Timestamp: m.Timestamp, - Duration: m.Duration, - }) - case *messages.FetchEvent: - event, err = json.Marshal(FetchEventFTS{ + case *messages.NetworkRequest: + event, err = json.Marshal(NetworkRequestFTS{ Method: m.Method, URL: m.URL, Request: m.Request, @@ -97,14 +87,7 @@ func (s *Saver) sendToFTS(msg messages.Message, sessionID uint64) { TimeToInteractive: m.TimeToInteractive, }) case *messages.GraphQL: - event, err = json.Marshal(GraphQLEventFTS{ - OperationKind: m.OperationKind, - OperationName: m.OperationName, - Variables: m.Variables, - Response: m.Response, - }) - case *messages.GraphQLEvent: - event, err = json.Marshal(GraphQLEventFTS{ + event, err = json.Marshal(GraphQLFTS{ OperationKind: m.OperationKind, OperationName: m.OperationName, Variables: m.Variables, diff --git a/backend/internal/db/datasaver/messages.go b/backend/internal/db/datasaver/messages.go index 621659c6d..834db33ed 100644 --- a/backend/internal/db/datasaver/messages.go +++ b/backend/internal/db/datasaver/messages.go @@ -38,12 +38,12 @@ func (mi *Saver) InsertMessage(msg Message) error { case *PageEvent: mi.sendToFTS(msg, sessionID) return mi.pg.InsertWebPageEvent(sessionID, m) - case *FetchEvent: + case *NetworkRequest: mi.sendToFTS(msg, 
sessionID) - return mi.pg.InsertWebFetchEvent(sessionID, m) - case *GraphQLEvent: + return mi.pg.InsertWebNetworkRequest(sessionID, m) + case *GraphQL: mi.sendToFTS(msg, sessionID) - return mi.pg.InsertWebGraphQLEvent(sessionID, m) + return mi.pg.InsertWebGraphQL(sessionID, m) case *JSException: return mi.pg.InsertWebJSException(m) case *IntegrationEvent: diff --git a/backend/internal/sessionender/ender.go b/backend/internal/sessionender/ender.go index dbd3eb901..c1c2c9b7f 100644 --- a/backend/internal/sessionender/ender.go +++ b/backend/internal/sessionender/ender.go @@ -5,7 +5,6 @@ import ( "fmt" "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" - log2 "openreplay/backend/pkg/log" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "time" @@ -29,10 +28,9 @@ type SessionEnder struct { timeCtrl *timeController activeSessions syncfloat64.UpDownCounter totalSessions syncfloat64.Counter - stats log2.QueueStats } -func New(metrics *monitoring.Metrics, timeout int64, parts int, stats log2.QueueStats) (*SessionEnder, error) { +func New(metrics *monitoring.Metrics, timeout int64, parts int) (*SessionEnder, error) { if metrics == nil { return nil, fmt.Errorf("metrics module is empty") } @@ -51,13 +49,11 @@ func New(metrics *monitoring.Metrics, timeout int64, parts int, stats log2.Queue timeCtrl: NewTimeController(parts), activeSessions: activeSessions, totalSessions: totalSessions, - stats: stats, }, nil } // UpdateSession save timestamp for new sessions and update for existing sessions func (se *SessionEnder) UpdateSession(msg messages.Message) { - se.stats.Collect(msg) var ( sessionID = msg.Meta().SessionID() batchTimestamp = msg.Meta().Batch().Timestamp() diff --git a/backend/internal/sink/assetscache/assets.go b/backend/internal/sink/assetscache/assets.go index aa2dccbba..4c63f6897 100644 --- a/backend/internal/sink/assetscache/assets.go +++ b/backend/internal/sink/assetscache/assets.go @@ -12,6 +12,7 @@ import ( 
"openreplay/backend/pkg/monitoring" "openreplay/backend/pkg/queue/types" "openreplay/backend/pkg/url/assets" + "strings" "sync" "time" ) @@ -27,6 +28,7 @@ type AssetsCache struct { rewriter *assets.Rewriter producer types.Producer cache map[string]*CachedAsset + blackList []string // use "example.com" to filter all domains or ".example.com" to filter only third-level domain totalAssets syncfloat64.Counter cachedAssets syncfloat64.Counter skippedAssets syncfloat64.Counter @@ -61,12 +63,22 @@ func New(cfg *sink.Config, rewriter *assets.Rewriter, producer types.Producer, m rewriter: rewriter, producer: producer, cache: make(map[string]*CachedAsset, 64), + blackList: make([]string, 0), totalAssets: totalAssets, cachedAssets: cachedAssets, skippedAssets: skippedAssets, assetSize: assetSize, assetDuration: assetDuration, } + // Parse black list for cache layer + if len(cfg.CacheBlackList) > 0 { + blackList := strings.Split(cfg.CacheBlackList, ",") + for _, domain := range blackList { + if len(domain) > 0 { + assetsCache.blackList = append(assetsCache.blackList, domain) + } + } + } go assetsCache.cleaner() return assetsCache } @@ -98,6 +110,22 @@ func (e *AssetsCache) clearCache() { log.Printf("cache cleaner: deleted %d/%d assets", deleted, cacheSize) } +func (e *AssetsCache) shouldSkipAsset(baseURL string) bool { + if len(e.blackList) == 0 { + return false + } + host, err := parseHost(baseURL) + if err != nil { + return false + } + for _, blackHost := range e.blackList { + if strings.Contains(host, blackHost) { + return true + } + } + return false +} + func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { switch m := msg.(type) { case *messages.SetNodeAttributeURLBased: @@ -110,6 +138,9 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { newMsg.SetMeta(msg.Meta()) return newMsg } else if m.Name == "style" { + if e.shouldSkipAsset(m.BaseURL) { + return msg + } newMsg := &messages.SetNodeAttribute{ ID: m.ID, Name: m.Name, @@ 
-119,6 +150,9 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { return newMsg } case *messages.SetCSSDataURLBased: + if e.shouldSkipAsset(m.BaseURL) { + return msg + } newMsg := &messages.SetCSSData{ ID: m.ID, Data: e.handleCSS(m.SessionID(), m.BaseURL, m.Data), @@ -126,6 +160,9 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { newMsg.SetMeta(msg.Meta()) return newMsg case *messages.CSSInsertRuleURLBased: + if e.shouldSkipAsset(m.BaseURL) { + return msg + } newMsg := &messages.CSSInsertRule{ ID: m.ID, Index: m.Index, @@ -134,6 +171,9 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { newMsg.SetMeta(msg.Meta()) return newMsg case *messages.AdoptedSSReplaceURLBased: + if e.shouldSkipAsset(m.BaseURL) { + return msg + } newMsg := &messages.AdoptedSSReplace{ SheetID: m.SheetID, Text: e.handleCSS(m.SessionID(), m.BaseURL, m.Text), @@ -141,6 +181,9 @@ func (e *AssetsCache) ParseAssets(msg messages.Message) messages.Message { newMsg.SetMeta(msg.Meta()) return newMsg case *messages.AdoptedSSInsertRuleURLBased: + if e.shouldSkipAsset(m.BaseURL) { + return msg + } newMsg := &messages.AdoptedSSInsertRule{ SheetID: m.SheetID, Index: m.Index, @@ -180,13 +223,21 @@ func (e *AssetsCache) handleURL(sessionID uint64, baseURL string, urlVal string) } } +func parseHost(baseURL string) (string, error) { + u, err := url.Parse(baseURL) + if err != nil { + return "", err + } + return u.Scheme + "://" + u.Host + "/", nil +} + func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) string { ctx := context.Background() e.totalAssets.Add(ctx, 1) // Try to find asset in cache h := md5.New() // Cut first part of url (scheme + host) - u, err := url.Parse(baseURL) + justUrl, err := parseHost(baseURL) if err != nil { log.Printf("can't parse url: %s, err: %s", baseURL, err) if e.cfg.CacheAssets { @@ -194,7 +245,6 @@ func (e *AssetsCache) handleCSS(sessionID uint64, baseURL string, css string) st 
} return e.getRewrittenCSS(sessionID, baseURL, css) } - justUrl := u.Scheme + "://" + u.Host + "/" // Calculate hash sum of url + css io.WriteString(h, justUrl) io.WriteString(h, css) diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 12a37183f..3b315561d 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -4,29 +4,51 @@ import ( "bytes" "context" "fmt" + gzip "github.com/klauspost/pgzip" "go.opentelemetry.io/otel/metric/instrument/syncfloat64" "log" config "openreplay/backend/internal/config/storage" - "openreplay/backend/pkg/flakeid" "openreplay/backend/pkg/messages" "openreplay/backend/pkg/monitoring" "openreplay/backend/pkg/storage" "os" "strconv" + "strings" + "sync" "time" ) +type FileType string + +const ( + DOM FileType = "/dom.mob" + DEV FileType = "/devtools.mob" +) + +type Task struct { + id string + doms *bytes.Buffer + dome *bytes.Buffer + dev *bytes.Buffer +} + type Storage struct { cfg *config.Config s3 *storage.S3 startBytes []byte - totalSessions syncfloat64.Counter - sessionDOMSize syncfloat64.Histogram - sessionDevtoolsSize syncfloat64.Histogram - readingDOMTime syncfloat64.Histogram - readingTime syncfloat64.Histogram - archivingTime syncfloat64.Histogram + totalSessions syncfloat64.Counter + sessionDOMSize syncfloat64.Histogram + sessionDEVSize syncfloat64.Histogram + readingDOMTime syncfloat64.Histogram + readingDEVTime syncfloat64.Histogram + archivingDOMTime syncfloat64.Histogram + archivingDEVTime syncfloat64.Histogram + uploadingDOMTime syncfloat64.Histogram + uploadingDEVTime syncfloat64.Histogram + + tasks chan *Task + ready chan struct{} } func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Storage, error) { @@ -49,186 +71,235 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor if err != nil { log.Printf("can't create sessions_dt_size metric: %s", err) } - readingTime, err := 
metrics.RegisterHistogram("reading_duration") + readingDOMTime, err := metrics.RegisterHistogram("reading_duration") if err != nil { log.Printf("can't create reading_duration metric: %s", err) } - archivingTime, err := metrics.RegisterHistogram("archiving_duration") + readingDEVTime, err := metrics.RegisterHistogram("reading_dt_duration") + if err != nil { + log.Printf("can't create reading_duration metric: %s", err) + } + archivingDOMTime, err := metrics.RegisterHistogram("archiving_duration") if err != nil { log.Printf("can't create archiving_duration metric: %s", err) } - return &Storage{ - cfg: cfg, - s3: s3, - startBytes: make([]byte, cfg.FileSplitSize), - totalSessions: totalSessions, - sessionDOMSize: sessionDOMSize, - sessionDevtoolsSize: sessionDevtoolsSize, - readingTime: readingTime, - archivingTime: archivingTime, - }, nil + archivingDEVTime, err := metrics.RegisterHistogram("archiving_dt_duration") + if err != nil { + log.Printf("can't create archiving_duration metric: %s", err) + } + uploadingDOMTime, err := metrics.RegisterHistogram("uploading_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } + uploadingDEVTime, err := metrics.RegisterHistogram("uploading_dt_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } + newStorage := &Storage{ + cfg: cfg, + s3: s3, + startBytes: make([]byte, cfg.FileSplitSize), + totalSessions: totalSessions, + sessionDOMSize: sessionDOMSize, + sessionDEVSize: sessionDevtoolsSize, + readingDOMTime: readingDOMTime, + readingDEVTime: readingDEVTime, + archivingDOMTime: archivingDOMTime, + archivingDEVTime: archivingDEVTime, + uploadingDOMTime: uploadingDOMTime, + uploadingDEVTime: uploadingDEVTime, + tasks: make(chan *Task, 1), + ready: make(chan struct{}), + } + go newStorage.worker() + return newStorage, nil } -func (s *Storage) UploadSessionFiles(msg *messages.SessionEnd) error { - if err := s.uploadKey(msg.SessionID(), "/dom.mob", 
true, 5, msg.EncryptionKey); err != nil { +func (s *Storage) Wait() { + <-s.ready +} + +func (s *Storage) Upload(msg *messages.SessionEnd) (err error) { + // Generate file path + sessionID := strconv.FormatUint(msg.SessionID(), 10) + filePath := s.cfg.FSDir + "/" + sessionID + // Prepare sessions + newTask := &Task{ + id: sessionID, + } + wg := &sync.WaitGroup{} + wg.Add(2) + go func() { + if prepErr := s.prepareSession(filePath, DOM, newTask); prepErr != nil { + err = fmt.Errorf("prepareSession DOM err: %s", prepErr) + } + wg.Done() + }() + go func() { + if prepErr := s.prepareSession(filePath, DEV, newTask); prepErr != nil { + err = fmt.Errorf("prepareSession DEV err: %s", prepErr) + } + wg.Done() + }() + wg.Wait() + if err != nil { + if strings.Contains(err.Error(), "big file") { + log.Printf("%s, sess: %d", err, msg.SessionID()) + return nil + } return err } - if err := s.uploadKey(msg.SessionID(), "/devtools.mob", false, 4, msg.EncryptionKey); err != nil { - log.Printf("can't find devtools for session: %d, err: %s", msg.SessionID(), err) - } + // Send new task to worker + s.tasks <- newTask + // Unload worker + <-s.ready return nil } -// TODO: make a bit cleaner. -// TODO: Of course, I'll do! 
-func (s *Storage) uploadKey(sessID uint64, suffix string, shouldSplit bool, retryCount int, encryptionKey string) error { - if retryCount <= 0 { - return nil - } - start := time.Now() - fileName := strconv.FormatUint(sessID, 10) - mobFileName := fileName - if suffix == "/devtools.mob" { - mobFileName += "devtools" - } - filePath := s.cfg.FSDir + "/" + mobFileName - +func (s *Storage) openSession(filePath string) ([]byte, error) { // Check file size before download into memory info, err := os.Stat(filePath) - if err == nil { - if info.Size() > s.cfg.MaxFileSize { - log.Printf("big file, size: %d, session: %d", info.Size(), sessID) - return nil - } + if err == nil && info.Size() > s.cfg.MaxFileSize { + return nil, fmt.Errorf("big file, size: %d", info.Size()) } - file, err := os.Open(filePath) + // Read file into memory + return os.ReadFile(filePath) +} + +func (s *Storage) prepareSession(path string, tp FileType, task *Task) error { + // Open mob file + if tp == DEV { + path += "devtools" + } + startRead := time.Now() + mob, err := s.openSession(path) if err != nil { - return fmt.Errorf("File open error: %v; sessID: %s, part: %d, sessStart: %s\n", - err, fileName, sessID%16, - time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), - ) + return err } - defer file.Close() - - var fileSize int64 = 0 - fileInfo, err := file.Stat() - if err != nil { - log.Printf("can't get file info: %s", err) - } else { - fileSize = fileInfo.Size() - } - - var encryptedData []byte - fileName += suffix - if shouldSplit { - nRead, err := file.Read(s.startBytes) - if err != nil { - log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", - err, - fileName, - sessID%16, - time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), - ) - time.AfterFunc(s.cfg.RetryTimeout, func() { - s.uploadKey(sessID, suffix, shouldSplit, retryCount-1, encryptionKey) - }) - return nil - } - s.readingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - - start = 
time.Now() - // Encrypt session file if we have encryption key - if encryptionKey != "" { - encryptedData, err = EncryptData(s.startBytes[:nRead], []byte(encryptionKey)) - if err != nil { - log.Printf("can't encrypt data: %s", err) - encryptedData = s.startBytes[:nRead] - } - } else { - encryptedData = s.startBytes[:nRead] - } - // Compress and save to s3 - startReader := bytes.NewBuffer(encryptedData) - if err := s.s3.Upload(s.gzipFile(startReader), fileName+"s", "application/octet-stream", true); err != nil { - log.Fatalf("Storage: start upload failed. %v\n", err) - } - // TODO: fix possible error (if we read less then FileSplitSize) - if nRead == s.cfg.FileSplitSize { - restPartSize := fileSize - int64(nRead) - fileData := make([]byte, restPartSize) - nRead, err = file.Read(fileData) - if err != nil { - log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", - err, - fileName, - sessID%16, - time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), - ) - return nil - } - if int64(nRead) != restPartSize { - log.Printf("can't read the rest part of file") - } - - // Encrypt session file if we have encryption key - if encryptionKey != "" { - encryptedData, err = EncryptData(fileData, []byte(encryptionKey)) - if err != nil { - log.Printf("can't encrypt data: %s", err) - encryptedData = fileData - } - } else { - encryptedData = fileData - } - // Compress and save to s3 - endReader := bytes.NewBuffer(encryptedData) - if err := s.s3.Upload(s.gzipFile(endReader), fileName+"e", "application/octet-stream", true); err != nil { - log.Fatalf("Storage: end upload failed. 
%v\n", err) - } - } - s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } else { - start = time.Now() - fileData := make([]byte, fileSize) - nRead, err := file.Read(fileData) - if err != nil { - log.Printf("File read error: %s; sessID: %s, part: %d, sessStart: %s", - err, - fileName, - sessID%16, - time.UnixMilli(int64(flakeid.ExtractTimestamp(sessID))), - ) - return nil - } - if int64(nRead) != fileSize { - log.Printf("can't read the rest part of file") - } - - // Encrypt session file if we have encryption key - if encryptionKey != "" { - encryptedData, err = EncryptData(fileData, []byte(encryptionKey)) - if err != nil { - log.Printf("can't encrypt data: %s", err) - encryptedData = fileData - } - } else { - encryptedData = fileData - } - endReader := bytes.NewBuffer(encryptedData) - if err := s.s3.Upload(s.gzipFile(endReader), fileName, "application/octet-stream", true); err != nil { - log.Fatalf("Storage: end upload failed. %v\n", err) - } - s.archivingTime.Record(context.Background(), float64(time.Now().Sub(start).Milliseconds())) - } - - // Save metrics + durRead := time.Now().Sub(startRead).Milliseconds() + // Send metrics ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) - if shouldSplit { - s.totalSessions.Add(ctx, 1) - s.sessionDOMSize.Record(ctx, float64(fileSize)) + if tp == DOM { + s.sessionDOMSize.Record(ctx, float64(len(mob))) + s.readingDOMTime.Record(ctx, float64(durRead)) } else { - s.sessionDevtoolsSize.Record(ctx, float64(fileSize)) + s.sessionDEVSize.Record(ctx, float64(len(mob))) + s.readingDEVTime.Record(ctx, float64(durRead)) + } + // Encode and compress session + if tp == DEV { + startCompress := time.Now() + task.dev = s.compressSession(mob) + s.archivingDEVTime.Record(ctx, float64(time.Now().Sub(startCompress).Milliseconds())) + } else { + if len(mob) <= s.cfg.FileSplitSize { + startCompress := time.Now() + task.doms = s.compressSession(mob) + s.archivingDOMTime.Record(ctx, 
float64(time.Now().Sub(startCompress).Milliseconds())) + return nil + } + wg := &sync.WaitGroup{} + wg.Add(2) + var firstPart, secondPart int64 + go func() { + start := time.Now() + task.doms = s.compressSession(mob[:s.cfg.FileSplitSize]) + firstPart = time.Now().Sub(start).Milliseconds() + wg.Done() + }() + go func() { + start := time.Now() + task.dome = s.compressSession(mob[s.cfg.FileSplitSize:]) + secondPart = time.Now().Sub(start).Milliseconds() + wg.Done() + }() + wg.Wait() + s.archivingDOMTime.Record(ctx, float64(firstPart+secondPart)) } - return nil } + +func (s *Storage) encryptSession(data []byte, encryptionKey string) []byte { + var encryptedData []byte + var err error + if encryptionKey != "" { + encryptedData, err = EncryptData(data, []byte(encryptionKey)) + if err != nil { + log.Printf("can't encrypt data: %s", err) + encryptedData = data + } + } else { + encryptedData = data + } + return encryptedData +} + +func (s *Storage) compressSession(data []byte) *bytes.Buffer { + zippedMob := new(bytes.Buffer) + z, _ := gzip.NewWriterLevel(zippedMob, gzip.BestSpeed) + if _, err := z.Write(data); err != nil { + log.Printf("can't write session data to compressor: %s", err) + } + if err := z.Close(); err != nil { + log.Printf("can't close compressor: %s", err) + } + return zippedMob +} + +func (s *Storage) uploadSession(task *Task) { + wg := &sync.WaitGroup{} + wg.Add(3) + var ( + uploadDoms int64 = 0 + uploadDome int64 = 0 + uploadDev int64 = 0 + ) + go func() { + if task.doms != nil { + start := time.Now() + if err := s.s3.Upload(task.doms, task.id+string(DOM)+"s", "application/octet-stream", true); err != nil { + log.Fatalf("Storage: start upload failed. %s", err) + } + uploadDoms = time.Now().Sub(start).Milliseconds() + } + wg.Done() + }() + go func() { + if task.dome != nil { + start := time.Now() + if err := s.s3.Upload(task.dome, task.id+string(DOM)+"e", "application/octet-stream", true); err != nil { + log.Fatalf("Storage: start upload failed. 
%s", err) + } + uploadDome = time.Now().Sub(start).Milliseconds() + } + wg.Done() + }() + go func() { + if task.dev != nil { + start := time.Now() + if err := s.s3.Upload(task.dev, task.id+string(DEV), "application/octet-stream", true); err != nil { + log.Fatalf("Storage: start upload failed. %s", err) + } + uploadDev = time.Now().Sub(start).Milliseconds() + } + wg.Done() + }() + wg.Wait() + // Record metrics + ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + s.uploadingDOMTime.Record(ctx, float64(uploadDoms+uploadDome)) + s.uploadingDEVTime.Record(ctx, float64(uploadDev)) + s.totalSessions.Add(ctx, 1) +} + +func (s *Storage) worker() { + for { + select { + case task := <-s.tasks: + s.uploadSession(task) + default: + // Signal that worker finished all tasks + s.ready <- struct{}{} + } + } +} diff --git a/backend/pkg/db/cache/messages-web.go b/backend/pkg/db/cache/messages-web.go index 931d1f639..1df3d1520 100644 --- a/backend/pkg/db/cache/messages-web.go +++ b/backend/pkg/db/cache/messages-web.go @@ -99,7 +99,7 @@ func (c *PGCache) InsertSessionReferrer(sessionID uint64, referrer string) error return c.Conn.InsertSessionReferrer(sessionID, referrer) } -func (c *PGCache) InsertWebFetchEvent(sessionID uint64, e *FetchEvent) error { +func (c *PGCache) InsertWebNetworkRequest(sessionID uint64, e *NetworkRequest) error { session, err := c.Cache.GetSession(sessionID) if err != nil { return err @@ -108,10 +108,10 @@ func (c *PGCache) InsertWebFetchEvent(sessionID uint64, e *FetchEvent) error { if err != nil { return err } - return c.Conn.InsertWebFetchEvent(sessionID, session.ProjectID, project.SaveRequestPayloads, e) + return c.Conn.InsertWebNetworkRequest(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } -func (c *PGCache) InsertWebGraphQLEvent(sessionID uint64, e *GraphQLEvent) error { +func (c *PGCache) InsertWebGraphQL(sessionID uint64, e *GraphQL) error { session, err := c.Cache.GetSession(sessionID) if err != nil { return 
err @@ -120,7 +120,7 @@ func (c *PGCache) InsertWebGraphQLEvent(sessionID uint64, e *GraphQLEvent) error if err != nil { return err } - return c.Conn.InsertWebGraphQLEvent(sessionID, session.ProjectID, project.SaveRequestPayloads, e) + return c.Conn.InsertWebGraphQL(sessionID, session.ProjectID, project.SaveRequestPayloads, e) } func (c *PGCache) InsertWebCustomEvent(sessionID uint64, e *CustomEvent) error { diff --git a/backend/pkg/db/postgres/bulk.go b/backend/pkg/db/postgres/bulk.go index 16f59efcd..7b9bf90c8 100644 --- a/backend/pkg/db/postgres/bulk.go +++ b/backend/pkg/db/postgres/bulk.go @@ -2,8 +2,14 @@ package postgres import ( "bytes" + "context" "errors" "fmt" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric/instrument/syncfloat64" + "log" + "openreplay/backend/pkg/monitoring" + "time" ) const ( @@ -18,13 +24,15 @@ type Bulk interface { } type bulkImpl struct { - conn Pool - table string - columns string - template string - setSize int - sizeLimit int - values []interface{} + conn Pool + table string + columns string + template string + setSize int + sizeLimit int + values []interface{} + bulkSize syncfloat64.Histogram + bulkDuration syncfloat64.Histogram } func (b *bulkImpl) Append(args ...interface{}) error { @@ -46,6 +54,8 @@ func (b *bulkImpl) Send() error { } func (b *bulkImpl) send() error { + start := time.Now() + size := len(b.values) / b.setSize request := bytes.NewBufferString(insertPrefix + b.table + b.columns + insertValues) args := make([]interface{}, b.setSize) for i := 0; i < len(b.values)/b.setSize; i++ { @@ -63,13 +73,19 @@ func (b *bulkImpl) send() error { if err != nil { return fmt.Errorf("send bulk err: %s", err) } + // Save bulk metrics + ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + b.bulkDuration.Record(ctx, float64(time.Now().Sub(start).Milliseconds()), attribute.String("table", b.table)) + b.bulkSize.Record(ctx, float64(size), attribute.String("table", b.table)) return nil } 
-func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { +func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { switch { case conn == nil: return nil, errors.New("db conn is empty") + case metrics == nil: + return nil, errors.New("metrics is empty") case table == "": return nil, errors.New("table is empty") case columns == "": @@ -81,13 +97,23 @@ func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) case sizeLimit <= 0: return nil, errors.New("size limit is wrong") } + messagesInBulk, err := metrics.RegisterHistogram("messages_in_bulk") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } + bulkInsertDuration, err := metrics.RegisterHistogram("bulk_insert_duration") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } return &bulkImpl{ - conn: conn, - table: table, - columns: columns, - template: template, - setSize: setSize, - sizeLimit: sizeLimit, - values: make([]interface{}, 0, setSize*sizeLimit), + conn: conn, + table: table, + columns: columns, + template: template, + setSize: setSize, + sizeLimit: sizeLimit, + values: make([]interface{}, 0, setSize*sizeLimit), + bulkSize: messagesInBulk, + bulkDuration: bulkInsertDuration, }, nil } diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 9e269fcc0..73859fd39 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -34,7 +34,15 @@ type Conn struct { customEvents Bulk webPageEvents Bulk webInputEvents Bulk - webGraphQLEvents Bulk + webGraphQL Bulk + webErrors Bulk + webErrorEvents Bulk + webErrorTags Bulk + webIssues Bulk + webIssueEvents Bulk + webCustomEvents Bulk + webClickEvents Bulk + webNetworkRequest Bulk sessionUpdates map[uint64]*sessionUpdates batchQueueLimit int batchSizeLimit int @@ -70,7 +78,7 @@ func NewConn(url string, 
queueLimit, sizeLimit int, metrics *monitoring.Metrics) if err != nil { log.Fatalf("can't create new pool wrapper: %s", err) } - conn.initBulks() + conn.initBulks(metrics) return conn } @@ -99,58 +107,122 @@ func (conn *Conn) initMetrics(metrics *monitoring.Metrics) { } } -func (conn *Conn) initBulks() { +func (conn *Conn) initBulks(metrics *monitoring.Metrics) { var err error - conn.autocompletes, err = NewBulk(conn.c, + conn.autocompletes, err = NewBulk(conn.c, metrics, "autocomplete", "(value, type, project_id)", "($%d, $%d, $%d)", - 3, 100) + 3, 200) if err != nil { - log.Fatalf("can't create autocomplete bulk") + log.Fatalf("can't create autocomplete bulk: %s", err) } - conn.requests, err = NewBulk(conn.c, + conn.requests, err = NewBulk(conn.c, metrics, "events_common.requests", "(session_id, timestamp, seq_index, url, duration, success)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", - 6, 100) + 6, 200) if err != nil { - log.Fatalf("can't create requests bulk") + log.Fatalf("can't create requests bulk: %s", err) } - conn.customEvents, err = NewBulk(conn.c, + conn.customEvents, err = NewBulk(conn.c, metrics, "events_common.customs", "(session_id, timestamp, seq_index, name, payload)", "($%d, $%d, $%d, left($%d, 2700), $%d)", - 5, 100) + 5, 200) if err != nil { - log.Fatalf("can't create customEvents bulk") + log.Fatalf("can't create customEvents bulk: %s", err) } - conn.webPageEvents, err = NewBulk(conn.c, + conn.webPageEvents, err = NewBulk(conn.c, metrics, "events.pages", "(session_id, message_id, timestamp, referrer, base_referrer, host, path, query, dom_content_loaded_time, "+ "load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, "+ "time_to_interactive, response_time, dom_building_time)", "($%d, $%d, $%d, $%d, $%d, $%d, $%d, $%d, NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0),"+ " NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0), NULLIF($%d, 0))", - 18, 100) + 
18, 200) if err != nil { - log.Fatalf("can't create webPageEvents bulk") + log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webInputEvents, err = NewBulk(conn.c, + conn.webInputEvents, err = NewBulk(conn.c, metrics, "events.inputs", "(session_id, message_id, timestamp, value, label)", "($%d, $%d, $%d, $%d, NULLIF($%d,''))", - 5, 100) + 5, 200) if err != nil { - log.Fatalf("can't create webPageEvents bulk") + log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webGraphQLEvents, err = NewBulk(conn.c, + conn.webGraphQL, err = NewBulk(conn.c, metrics, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", - 6, 100) + 6, 200) if err != nil { - log.Fatalf("can't create webPageEvents bulk") + log.Fatalf("can't create webPageEvents bulk: %s", err) + } + conn.webErrors, err = NewBulk(conn.c, metrics, + "errors", + "(error_id, project_id, source, name, message, payload)", + "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)", + 6, 200) + if err != nil { + log.Fatalf("can't create webErrors bulk: %s", err) + } + conn.webErrorEvents, err = NewBulk(conn.c, metrics, + "events.errors", + "(session_id, message_id, timestamp, error_id)", + "($%d, $%d, $%d, $%d)", + 4, 200) + if err != nil { + log.Fatalf("can't create webErrorEvents bulk: %s", err) + } + conn.webErrorTags, err = NewBulk(conn.c, metrics, + "public.errors_tags", + "(session_id, message_id, error_id, key, value)", + "($%d, $%d, $%d, $%d, $%d)", + 5, 200) + if err != nil { + log.Fatalf("can't create webErrorEvents bulk: %s", err) + } + conn.webIssues, err = NewBulk(conn.c, metrics, + "issues", + "(project_id, issue_id, type, context_string)", + "($%d, $%d, $%d, $%d)", + 4, 200) + if err != nil { + log.Fatalf("can't create webIssues bulk: %s", err) + } + conn.webIssueEvents, err = NewBulk(conn.c, metrics, + "events_common.issues", + "(session_id, issue_id, timestamp, seq_index, payload)", + "($%d, $%d, $%d, $%d, CAST($%d AS 
jsonb))", + 5, 200) + if err != nil { + log.Fatalf("can't create webIssueEvents bulk: %s", err) + } + conn.webCustomEvents, err = NewBulk(conn.c, metrics, + "events_common.customs", + "(session_id, seq_index, timestamp, name, payload, level)", + "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", + 6, 200) + if err != nil { + log.Fatalf("can't create webCustomEvents bulk: %s", err) + } + conn.webClickEvents, err = NewBulk(conn.c, metrics, + "events.clicks", + "(session_id, message_id, timestamp, label, selector, url, path)", + "($%d, $%d, $%d, NULLIF($%d, ''), $%d, $%d, $%d)", + 7, 200) + if err != nil { + log.Fatalf("can't create webClickEvents bulk: %s", err) + } + conn.webNetworkRequest, err = NewBulk(conn.c, metrics, + "events_common.requests", + "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success)", + "($%d, $%d, $%d, left($%d, 2700), $%d, $%d, $%d, $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d)", + 13, 200) + if err != nil { + log.Fatalf("can't create webNetworkRequest bulk: %s", err) } } @@ -195,7 +267,14 @@ func (conn *Conn) updateSessionEvents(sessionID uint64, events, pages int) { if _, ok := conn.sessionUpdates[sessionID]; !ok { conn.sessionUpdates[sessionID] = NewSessionUpdates(sessionID) } - conn.sessionUpdates[sessionID].add(pages, events) + conn.sessionUpdates[sessionID].addEvents(pages, events) +} + +func (conn *Conn) updateSessionIssues(sessionID uint64, errors, issueScore int) { + if _, ok := conn.sessionUpdates[sessionID]; !ok { + conn.sessionUpdates[sessionID] = NewSessionUpdates(sessionID) + } + conn.sessionUpdates[sessionID].addIssues(errors, issueScore) } func (conn *Conn) sendBulks() { @@ -214,8 +293,32 @@ func (conn *Conn) sendBulks() { if err := conn.webInputEvents.Send(); err != nil { log.Printf("webInputEvents bulk send err: %s", err) } - if err := conn.webGraphQLEvents.Send(); err != nil { - log.Printf("webGraphQLEvents bulk send err: %s", err) + if 
err := conn.webGraphQL.Send(); err != nil { + log.Printf("webGraphQL bulk send err: %s", err) + } + if err := conn.webErrors.Send(); err != nil { + log.Printf("webErrors bulk send err: %s", err) + } + if err := conn.webErrorEvents.Send(); err != nil { + log.Printf("webErrorEvents bulk send err: %s", err) + } + if err := conn.webErrorTags.Send(); err != nil { + log.Printf("webErrorTags bulk send err: %s", err) + } + if err := conn.webIssues.Send(); err != nil { + log.Printf("webIssues bulk send err: %s", err) + } + if err := conn.webIssueEvents.Send(); err != nil { + log.Printf("webIssueEvents bulk send err: %s", err) + } + if err := conn.webCustomEvents.Send(); err != nil { + log.Printf("webCustomEvents bulk send err: %s", err) + } + if err := conn.webClickEvents.Send(); err != nil { + log.Printf("webClickEvents bulk send err: %s", err) + } + if err := conn.webNetworkRequest.Send(); err != nil { + log.Printf("webNetworkRequest bulk send err: %s", err) } } diff --git a/backend/pkg/db/postgres/messages-common.go b/backend/pkg/db/postgres/messages-common.go index fca11ce88..f2ea42520 100644 --- a/backend/pkg/db/postgres/messages-common.go +++ b/backend/pkg/db/postgres/messages-common.go @@ -146,70 +146,24 @@ func (conn *Conn) InsertMetadata(sessionID uint64, keyNo uint, value string) err return conn.c.Exec(fmt.Sprintf(sqlRequest, keyNo), value, sessionID) } -func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messages.IssueEvent) (err error) { - tx, err := conn.c.Begin() - if err != nil { - return err - } - defer func() { - if err != nil { - if rollbackErr := tx.rollback(); rollbackErr != nil { - log.Printf("rollback err: %s", rollbackErr) - } - } - }() +func (conn *Conn) InsertIssueEvent(sessionID uint64, projectID uint32, e *messages.IssueEvent) error { issueID := hashid.IssueID(projectID, e) - - // TEMP. 
TODO: nullable & json message field type payload := &e.Payload if *payload == "" || *payload == "{}" { payload = nil } - if err = tx.exec(` - INSERT INTO issues ( - project_id, issue_id, type, context_string - ) (SELECT - project_id, $2, $3, $4 - FROM sessions - WHERE session_id = $1 - )ON CONFLICT DO NOTHING`, - sessionID, issueID, e.Type, e.ContextString, - ); err != nil { - return err + if err := conn.webIssues.Append(projectID, issueID, e.Type, e.ContextString); err != nil { + log.Printf("insert web issue err: %s", err) } - if err = tx.exec(` - INSERT INTO events_common.issues ( - session_id, issue_id, timestamp, seq_index, payload - ) VALUES ( - $1, $2, $3, $4, CAST($5 AS jsonb) - )`, - sessionID, issueID, e.Timestamp, - truncSqIdx(e.MessageID), - payload, - ); err != nil { - return err + if err := conn.webIssueEvents.Append(sessionID, issueID, e.Timestamp, truncSqIdx(e.MessageID), payload); err != nil { + log.Printf("insert web issue event err: %s", err) } - if err = tx.exec(` - UPDATE sessions SET issue_score = issue_score + $2 - WHERE session_id = $1`, - sessionID, getIssueScore(e), - ); err != nil { - return err - } - // TODO: no redundancy. 
Deliver to UI in a different way + conn.updateSessionIssues(sessionID, 0, getIssueScore(e)) if e.Type == "custom" { - if err = tx.exec(` - INSERT INTO events_common.customs - (session_id, seq_index, timestamp, name, payload, level) - VALUES - ($1, $2, $3, left($4, 2700), $5, 'error') - `, - sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.ContextString, e.Payload, - ); err != nil { - return err + if err := conn.webCustomEvents.Append(sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.ContextString, e.Payload, "error"); err != nil { + log.Printf("insert web custom event err: %s", err) } } - err = tx.commit() - return + return nil } diff --git a/backend/pkg/db/postgres/messages-web.go b/backend/pkg/db/postgres/messages-web.go index 10cfad409..63669ecb7 100644 --- a/backend/pkg/db/postgres/messages-web.go +++ b/backend/pkg/db/postgres/messages-web.go @@ -9,9 +9,13 @@ import ( ) func (conn *Conn) InsertWebCustomEvent(sessionID uint64, projectID uint32, e *CustomEvent) error { - err := conn.InsertCustomEvent(sessionID, e.Timestamp, - truncSqIdx(e.MessageID), - e.Name, e.Payload) + err := conn.InsertCustomEvent( + sessionID, + uint64(e.Meta().Timestamp), + truncSqIdx(e.Meta().Index), + e.Name, + e.Payload, + ) if err == nil { conn.insertAutocompleteValue(sessionID, projectID, "CUSTOM", e.Name) } @@ -54,16 +58,12 @@ func (conn *Conn) InsertWebPageEvent(sessionID uint64, projectID uint32, e *Page } func (conn *Conn) InsertWebClickEvent(sessionID uint64, projectID uint32, e *ClickEvent) error { - sqlRequest := ` - INSERT INTO events.clicks - (session_id, message_id, timestamp, label, selector, url) - (SELECT - $1, $2, $3, NULLIF($4, ''), $5, host || path - FROM events.pages - WHERE session_id = $1 AND timestamp <= $3 ORDER BY timestamp DESC LIMIT 1 - ) - ` - conn.batchQueue(sessionID, sqlRequest, sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.Label, e.Selector) + var host, path string + host, path, _, _ = url.GetURLParts(e.Url) + log.Println("insert web click:", host, 
path) + if err := conn.webClickEvents.Append(sessionID, truncSqIdx(e.MessageID), e.Timestamp, e.Label, e.Selector, host+path, path); err != nil { + log.Printf("insert web click err: %s", err) + } // Accumulate session updates and exec inside batch with another sql commands conn.updateSessionEvents(sessionID, 1, 0) // Add new value set to autocomplete bulk @@ -87,64 +87,24 @@ func (conn *Conn) InsertWebInputEvent(sessionID uint64, projectID uint32, e *Inp return nil } -func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *types.ErrorEvent) (err error) { - tx, err := conn.c.Begin() - if err != nil { - return err - } - defer func() { - if err != nil { - if rollbackErr := tx.rollback(); rollbackErr != nil { - log.Printf("rollback err: %s", rollbackErr) - } - } - }() +func (conn *Conn) InsertWebErrorEvent(sessionID uint64, projectID uint32, e *types.ErrorEvent) error { errorID := e.ID(projectID) - - if err = tx.exec(` - INSERT INTO errors - (error_id, project_id, source, name, message, payload) - VALUES - ($1, $2, $3, $4, $5, $6::jsonb) - ON CONFLICT DO NOTHING`, - errorID, projectID, e.Source, e.Name, e.Message, e.Payload, - ); err != nil { - return err + if err := conn.webErrors.Append(errorID, projectID, e.Source, e.Name, e.Message, e.Payload); err != nil { + log.Printf("insert web error err: %s", err) } - if err = tx.exec(` - INSERT INTO events.errors - (session_id, message_id, timestamp, error_id) - VALUES - ($1, $2, $3, $4) - `, - sessionID, truncSqIdx(e.MessageID), e.Timestamp, errorID, - ); err != nil { - return err + if err := conn.webErrorEvents.Append(sessionID, truncSqIdx(e.MessageID), e.Timestamp, errorID); err != nil { + log.Printf("insert web error event err: %s", err) } - if err = tx.exec(` - UPDATE sessions SET errors_count = errors_count + 1, issue_score = issue_score + 1000 - WHERE session_id = $1`, - sessionID, - ); err != nil { - return err - } - err = tx.commit() - - // Insert tags - sqlRequest := ` - INSERT INTO 
public.errors_tags ( - session_id, message_id, error_id, key, value - ) VALUES ( - $1, $2, $3, $4, $5 - ) ON CONFLICT DO NOTHING` + conn.updateSessionIssues(sessionID, 1, 1000) for key, value := range e.Tags { - conn.batchQueue(sessionID, sqlRequest, sessionID, truncSqIdx(e.MessageID), errorID, key, value) + if err := conn.webErrorTags.Append(sessionID, truncSqIdx(e.MessageID), errorID, key, value); err != nil { + log.Printf("insert web error token err: %s", err) + } } - - return + return nil } -func (conn *Conn) InsertWebFetchEvent(sessionID uint64, projectID uint32, savePayload bool, e *FetchEvent) error { +func (conn *Conn) InsertWebNetworkRequest(sessionID uint64, projectID uint32, savePayload bool, e *NetworkRequest) error { var request, response *string if savePayload { request = &e.Request @@ -155,39 +115,18 @@ func (conn *Conn) InsertWebFetchEvent(sessionID uint64, projectID uint32, savePa if err != nil { return err } - - sqlRequest := ` - INSERT INTO events_common.requests ( - session_id, timestamp, seq_index, - url, host, path, query, - request_body, response_body, status_code, method, - duration, success - ) VALUES ( - $1, $2, $3, - left($4, 2700), $5, $6, $7, - $8, $9, $10::smallint, NULLIF($11, '')::http_method, - $12, $13 - ) ON CONFLICT DO NOTHING` - conn.batchQueue(sessionID, sqlRequest, - sessionID, e.Timestamp, truncSqIdx(e.MessageID), - e.URL, host, path, query, - request, response, e.Status, url.EnsureMethod(e.Method), - e.Duration, e.Status < 400, - ) - - // Record approximate message size - conn.updateBatchSize(sessionID, len(sqlRequest)+len(e.URL)+len(host)+len(path)+len(query)+ - len(e.Request)+len(e.Response)+len(url.EnsureMethod(e.Method))+8*5+1) + conn.webNetworkRequest.Append(sessionID, e.Meta().Timestamp, truncSqIdx(e.Meta().Index), e.URL, host, path, query, + request, response, e.Status, url.EnsureMethod(e.Method), e.Duration, e.Status < 400) return nil } -func (conn *Conn) InsertWebGraphQLEvent(sessionID uint64, projectID uint32, 
savePayload bool, e *GraphQLEvent) error { +func (conn *Conn) InsertWebGraphQL(sessionID uint64, projectID uint32, savePayload bool, e *GraphQL) error { var request, response *string if savePayload { request = &e.Variables response = &e.Response } - if err := conn.webGraphQLEvents.Append(sessionID, e.Timestamp, truncSqIdx(e.MessageID), e.OperationName, request, response); err != nil { + if err := conn.webGraphQL.Append(sessionID, e.Meta().Timestamp, truncSqIdx(e.Meta().Index), e.OperationName, request, response); err != nil { log.Printf("insert web graphQL event err: %s", err) } conn.insertAutocompleteValue(sessionID, projectID, "GRAPHQL", e.OperationName) diff --git a/backend/pkg/db/postgres/session-updates.go b/backend/pkg/db/postgres/session-updates.go index 47e374355..fbed2cdc7 100644 --- a/backend/pkg/db/postgres/session-updates.go +++ b/backend/pkg/db/postgres/session-updates.go @@ -1,12 +1,14 @@ package postgres // Mechanism of combination several session updates into one -const sessionUpdateReq = `UPDATE sessions SET pages_count = pages_count + $1, events_count = events_count + $2 WHERE session_id = $3` +const sessionUpdateReq = `UPDATE sessions SET pages_count = pages_count + $1, events_count = events_count + $2, errors_count = errors_count + $3, issue_score = issue_score + $4 WHERE session_id = $5` type sessionUpdates struct { sessionID uint64 pages int events int + errors int + issues int } func NewSessionUpdates(sessionID uint64) *sessionUpdates { @@ -14,17 +16,24 @@ func NewSessionUpdates(sessionID uint64) *sessionUpdates { sessionID: sessionID, pages: 0, events: 0, + errors: 0, + issues: 0, } } -func (su *sessionUpdates) add(pages, events int) { +func (su *sessionUpdates) addEvents(pages, events int) { su.pages += pages su.events += events } +func (su *sessionUpdates) addIssues(errors, issues int) { + su.errors += errors + su.issues += issues +} + func (su *sessionUpdates) request() (string, []interface{}) { if su.pages == 0 && su.events == 0 { return 
"", nil } - return sessionUpdateReq, []interface{}{su.pages, su.events, su.sessionID} + return sessionUpdateReq, []interface{}{su.pages, su.events, su.errors, su.issues, su.sessionID} } diff --git a/backend/pkg/handlers/custom/eventMapper.go b/backend/pkg/handlers/custom/eventMapper.go index 3280e7ebc..a85ebbdf0 100644 --- a/backend/pkg/handlers/custom/eventMapper.go +++ b/backend/pkg/handlers/custom/eventMapper.go @@ -69,13 +69,6 @@ func (b *EventMapper) Handle(message Message, messageID uint64, timestamp uint64 Type: getResourceType(msg.Initiator, msg.URL), Success: msg.Duration != 0, } - case *RawCustomEvent: - return &CustomEvent{ - MessageID: messageID, - Timestamp: timestamp, - Name: msg.Name, - Payload: msg.Payload, - } case *CustomIssue: return &IssueEvent{ Type: "custom", @@ -84,32 +77,6 @@ func (b *EventMapper) Handle(message Message, messageID uint64, timestamp uint64 ContextString: msg.Name, Payload: msg.Payload, } - case *Fetch: - return &FetchEvent{ - MessageID: messageID, - Timestamp: msg.Timestamp, - Method: msg.Method, - URL: msg.URL, - Request: msg.Request, - Response: msg.Response, - Status: msg.Status, - Duration: msg.Duration, - } - case *GraphQL: - return &GraphQLEvent{ - MessageID: messageID, - Timestamp: timestamp, - OperationKind: msg.OperationKind, - OperationName: msg.OperationName, - Variables: msg.Variables, - Response: msg.Response, - } - case *StateAction: - return &StateActionEvent{ - MessageID: messageID, - Timestamp: timestamp, - Type: msg.Type, - } } return nil } diff --git a/backend/pkg/handlers/web/networkIssue.go b/backend/pkg/handlers/web/networkIssue.go index 67522c850..20ef412dd 100644 --- a/backend/pkg/handlers/web/networkIssue.go +++ b/backend/pkg/handlers/web/networkIssue.go @@ -7,7 +7,7 @@ import ( /* Handler name: NetworkIssue Input events: ResourceTiming, - Fetch + NetworkRequest Output event: IssueEvent */ @@ -19,21 +19,7 @@ func (f *NetworkIssueDetector) Build() Message { func (f *NetworkIssueDetector) Handle(message 
Message, messageID uint64, timestamp uint64) Message { switch msg := message.(type) { - // case *ResourceTiming: - // success := msg.Duration != 0 // The only available way here - // if !success { - // issueType := "missing_resource" - // if msg.Initiator == "fetch" || msg.Initiator == "xmlhttprequest" { - // issueType = "bad_request" - // } - // return &IssueEvent{ - // Type: issueType, - // MessageID: messageID, - // Timestamp: msg.Timestamp, - // ContextString: msg.URL, - // } - // } - case *Fetch: + case *NetworkRequest: if msg.Status >= 400 { return &IssueEvent{ Type: "bad_request", diff --git a/backend/pkg/log/queue.go b/backend/pkg/log/queue.go deleted file mode 100644 index e9ac5dc1f..000000000 --- a/backend/pkg/log/queue.go +++ /dev/null @@ -1,79 +0,0 @@ -package log - -import ( - "fmt" - "log" - "time" - - "openreplay/backend/pkg/messages" -) - -type partitionStats struct { - maxts int64 - mints int64 - lastts int64 - lastID uint64 - count int -} - -// Update partition statistic -func (prt *partitionStats) update(m *messages.BatchInfo) { - if prt.maxts < m.Timestamp() { - prt.maxts = m.Timestamp() - } - if prt.mints > m.Timestamp() || prt.mints == 0 { - prt.mints = m.Timestamp() - } - prt.lastts = m.Timestamp() - prt.lastID = m.ID() - prt.count += 1 -} - -type queueStats struct { - prts map[int32]*partitionStats - tick <-chan time.Time -} - -type QueueStats interface { - Collect(msg messages.Message) -} - -func NewQueueStats(sec int) *queueStats { - return &queueStats{ - prts: make(map[int32]*partitionStats), - tick: time.Tick(time.Duration(sec) * time.Second), - } -} - -// Collect writes new data to partition statistic -func (qs *queueStats) Collect(msg messages.Message) { - prti := int32(msg.SessionID() % 16) // TODO use GetKeyPartition from kafka/key.go - prt, ok := qs.prts[prti] - if !ok { - qs.prts[prti] = &partitionStats{} - prt = qs.prts[prti] - } - prt.update(msg.Meta().Batch()) - - select { - case <-qs.tick: - qs.log() - qs.reset() - default: - } 
-} - -// Print to console collected statistics -func (qs *queueStats) log() { - s := "Queue Statistics: " - for i, p := range qs.prts { - s = fmt.Sprintf("%v | %v:: lastTS %v, lastID %v, count %v, maxTS %v, minTS %v", - s, i, p.lastts, p.lastID, p.count, p.maxts, p.mints) - } - log.Println(s) -} - -// Clear all queue partitions -func (qs *queueStats) reset() { - qs.prts = make(map[int32]*partitionStats) -} diff --git a/backend/pkg/messages/bytes.go b/backend/pkg/messages/bytes.go new file mode 100644 index 000000000..0576201ea --- /dev/null +++ b/backend/pkg/messages/bytes.go @@ -0,0 +1,119 @@ +package messages + +import ( + "errors" + "fmt" + "io" +) + +type BytesReader interface { + ReadSize() (uint64, error) + ReadByte() (byte, error) + ReadUint() (uint64, error) + ReadInt() (int64, error) + ReadBoolean() (bool, error) + ReadString() (string, error) + Data() []byte + Pointer() int64 + SetPointer(p int64) +} + +type bytesReaderImpl struct { + data []byte + curr int64 +} + +func NewBytesReader(data []byte) BytesReader { + return &bytesReaderImpl{ + data: data, + } +} + +func (m *bytesReaderImpl) ReadSize() (uint64, error) { + if len(m.data)-int(m.curr) < 3 { + return 0, fmt.Errorf("out of range") + } + var size uint64 + for i, b := range m.data[m.curr : m.curr+3] { + size += uint64(b) << (8 * i) + } + m.curr += 3 + return size, nil +} + +func (m *bytesReaderImpl) ReadByte() (byte, error) { + if int(m.curr) >= len(m.data) { + return 0, io.EOF + } + m.curr++ + return m.data[m.curr-1], nil +} + +func (m *bytesReaderImpl) ReadUint() (uint64, error) { + var x uint64 + var s uint + i := 0 + for { + b, err := m.ReadByte() + if err != nil { + return x, err + } + if b < 0x80 { + if i > 9 || i == 9 && b > 1 { + return x, errors.New("uint overflow") + } + return x | uint64(b)<> 1) + if err != nil { + return x, err + } + if ux&1 != 0 { + x = ^x + } + return x, err +} + +func (m *bytesReaderImpl) ReadBoolean() (bool, error) { + val, err := m.ReadByte() + if err != nil { + 
return false, err + } + return val == 1, nil +} + +func (m *bytesReaderImpl) ReadString() (string, error) { + l, err := m.ReadUint() + if err != nil { + return "", err + } + if l > 10e6 { + return "", errors.New("too long string") + } + if len(m.data)-int(m.curr) < int(l) { + return "", fmt.Errorf("out of range") + } + str := string(m.data[m.curr : int(m.curr)+int(l)]) + m.curr += int64(l) + return str, nil +} + +func (m *bytesReaderImpl) Data() []byte { + return m.data +} + +func (m *bytesReaderImpl) Pointer() int64 { + return m.curr +} + +func (m *bytesReaderImpl) SetPointer(p int64) { + m.curr = p +} diff --git a/backend/pkg/messages/cache.go b/backend/pkg/messages/cache.go new file mode 100644 index 000000000..b3c83a0e3 --- /dev/null +++ b/backend/pkg/messages/cache.go @@ -0,0 +1,22 @@ +package messages + +type pageLocations struct { + urls map[uint64]string +} + +func NewPageLocations() *pageLocations { + return &pageLocations{urls: make(map[uint64]string)} +} + +func (p *pageLocations) Set(sessID uint64, url string) { + p.urls[sessID] = url +} + +func (p *pageLocations) Get(sessID uint64) string { + url := p.urls[sessID] + return url +} + +func (p *pageLocations) Delete(sessID uint64) { + delete(p.urls, sessID) +} diff --git a/backend/pkg/messages/filters.go b/backend/pkg/messages/filters.go index 399c1863c..6c96383c9 100644 --- a/backend/pkg/messages/filters.go +++ b/backend/pkg/messages/filters.go @@ -2,7 +2,7 @@ package messages func IsReplayerType(id int) bool { - return 80 != id && 81 != id && 82 != id && 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 35 != id && 36 != id && 42 != id && 43 != id && 50 != id && 51 != id && 52 != id && 53 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 
101 != id && 104 != id && 110 != id && 111 != id + return 80 != id && 81 != id && 82 != id && 1 != id && 3 != id && 17 != id && 23 != id && 24 != id && 25 != id && 26 != id && 27 != id && 28 != id && 29 != id && 30 != id && 31 != id && 32 != id && 33 != id && 35 != id && 42 != id && 52 != id && 56 != id && 62 != id && 63 != id && 64 != id && 66 != id && 78 != id && 126 != id && 127 != id && 107 != id && 91 != id && 92 != id && 94 != id && 95 != id && 97 != id && 98 != id && 99 != id && 101 != id && 104 != id && 110 != id && 111 != id } func IsIOSType(id int) bool { diff --git a/backend/pkg/messages/iterator.go b/backend/pkg/messages/iterator.go index 7b7991b19..69e1f02bc 100644 --- a/backend/pkg/messages/iterator.go +++ b/backend/pkg/messages/iterator.go @@ -1,9 +1,7 @@ package messages import ( - "bytes" "fmt" - "io" "log" ) @@ -26,10 +24,15 @@ type messageIteratorImpl struct { broken bool messageInfo *message batchInfo *BatchInfo + urls *pageLocations } func NewMessageIterator(messageHandler MessageHandler, messageFilter []int, autoDecode bool) MessageIterator { - iter := &messageIteratorImpl{handler: messageHandler, autoDecode: autoDecode} + iter := &messageIteratorImpl{ + handler: messageHandler, + autoDecode: autoDecode, + urls: NewPageLocations(), + } if len(messageFilter) != 0 { filter := make(map[int]struct{}, len(messageFilter)) for _, msgType := range messageFilter { @@ -54,76 +57,32 @@ func (i *messageIteratorImpl) prepareVars(batchInfo *BatchInfo) { } func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { + // Create new message reader + reader := NewMessageReader(batchData) + + // Pre-decode batch data + if err := reader.Parse(); err != nil { + log.Printf("pre-decode batch err: %s, info: %s", err, batchInfo.Info()) + return + } + // Prepare iterator before processing messages in batch i.prepareVars(batchInfo) - // Initialize batch reader - reader := bytes.NewReader(batchData) - - // Process until end of batch or parsing error - 
for { + for reader.Next() { // Increase message index (can be overwritten by batch info message) i.messageInfo.Index++ - if i.broken { - log.Printf("skipping broken batch, info: %s", i.batchInfo.Info()) - return - } - - if i.canSkip { - if _, err := reader.Seek(int64(i.size), io.SeekCurrent); err != nil { - log.Printf("can't skip message: %s, info: %s", err, i.batchInfo.Info()) - return - } - } - i.canSkip = false - - // Read message type - msgType, err := ReadUint(reader) - if err != nil { - if err != io.EOF { - log.Printf("can't read message type: %s, info: %s", err, i.batchInfo.Info()) - } - return - } - - var msg Message - // Read message body (and decode if protocol version less than 1) - if i.version > 0 && messageHasSize(msgType) { - // Read message size if it is a new protocol version - i.size, err = ReadSize(reader) - if err != nil { - log.Printf("can't read message size: %s, info: %s", err, i.batchInfo.Info()) - return - } - msg = &RawMessage{ - tp: msgType, - size: i.size, - reader: reader, - raw: batchData, - skipped: &i.canSkip, - broken: &i.broken, - meta: i.messageInfo, - } - i.canSkip = true - } else { - msg, err = ReadMessage(msgType, reader) - if err != nil { - if err != io.EOF { - log.Printf("can't read message body: %s, info: %s", err, i.batchInfo.Info()) - } - return - } - msg = transformDeprecated(msg) - } + msg := reader.Message() // Preprocess "system" messages if _, ok := i.preFilter[msg.TypeID()]; ok { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", msgType, i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) return } + msg = transformDeprecated(msg) if err := i.preprocessing(msg); err != nil { log.Printf("message preprocessing err: %s", err) return @@ -140,7 +99,7 @@ func (i *messageIteratorImpl) Iterate(batchData []byte, batchInfo *BatchInfo) { if i.autoDecode { msg = msg.Decode() if msg == nil { - log.Printf("decode error, type: %d, info: %s", 
msgType, i.batchInfo.Info()) + log.Printf("decode error, type: %d, info: %s", msg.TypeID(), i.batchInfo.Info()) return } } @@ -171,7 +130,7 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { if m.Timestamp == 0 { i.zeroTsLog("BatchMetadata") } - i.messageInfo.Url = m.Url + i.messageInfo.Url = m.Location i.version = m.Version i.batchInfo.version = m.Version @@ -184,6 +143,10 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { if m.Timestamp == 0 { i.zeroTsLog("BatchMeta") } + // Try to get saved session's page url + if savedURL := i.urls.Get(i.messageInfo.batch.sessionID); savedURL != "" { + i.messageInfo.Url = savedURL + } case *Timestamp: i.messageInfo.Timestamp = int64(m.Timestamp) @@ -204,9 +167,13 @@ func (i *messageIteratorImpl) preprocessing(msg Message) error { if m.Timestamp == 0 { i.zeroTsLog("SessionEnd") } + // Delete session from urls cache layer + i.urls.Delete(i.messageInfo.batch.sessionID) case *SetPageLocation: i.messageInfo.Url = m.URL + // Save session page url in cache for using in next batches + i.urls.Set(i.messageInfo.batch.sessionID, m.URL) } return nil } diff --git a/backend/pkg/messages/legacy-message-transform.go b/backend/pkg/messages/legacy-message-transform.go index 5e2bd3ed7..72757baf4 100644 --- a/backend/pkg/messages/legacy-message-transform.go +++ b/backend/pkg/messages/legacy-message-transform.go @@ -14,6 +14,17 @@ func transformDeprecated(msg Message) Message { Timestamp: m.Timestamp, EncryptionKey: "", } + case *Fetch: + return &NetworkRequest{ + Type: "fetch", + Method: m.Method, + URL: m.URL, + Request: m.Request, + Response: m.Response, + Status: m.Status, + Timestamp: m.Timestamp, + Duration: m.Duration, + } } return msg } diff --git a/backend/pkg/messages/message.go b/backend/pkg/messages/message.go index 7bb2572eb..3a8e029d5 100644 --- a/backend/pkg/messages/message.go +++ b/backend/pkg/messages/message.go @@ -4,7 +4,6 @@ import "fmt" type Message interface { Encode() []byte - 
EncodeWithIndex() []byte Decode() Message TypeID() int Meta() *message diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 138f8dcb7..418d34867 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -1,206 +1,103 @@ // Auto-generated, do not edit package messages -import "encoding/binary" - const ( - MsgBatchMeta = 80 - - MsgBatchMetadata = 81 - - MsgPartitionedMessage = 82 - - MsgTimestamp = 0 - - MsgSessionStart = 1 - - MsgSessionEndDeprecated = 3 - - MsgSetPageLocation = 4 - - MsgSetViewportSize = 5 - - MsgSetViewportScroll = 6 - - MsgCreateDocument = 7 - - MsgCreateElementNode = 8 - - MsgCreateTextNode = 9 - - MsgMoveNode = 10 - - MsgRemoveNode = 11 - - MsgSetNodeAttribute = 12 - - MsgRemoveNodeAttribute = 13 - - MsgSetNodeData = 14 - - MsgSetCSSData = 15 - - MsgSetNodeScroll = 16 - - MsgSetInputTarget = 17 - - MsgSetInputValue = 18 - - MsgSetInputChecked = 19 - - MsgMouseMove = 20 - - MsgConsoleLog = 22 - - MsgPageLoadTiming = 23 - - MsgPageRenderTiming = 24 - - MsgJSExceptionDeprecated = 25 - - MsgIntegrationEvent = 26 - - MsgRawCustomEvent = 27 - - MsgUserID = 28 - - MsgUserAnonymousID = 29 - - MsgMetadata = 30 - - MsgPageEvent = 31 - - MsgInputEvent = 32 - - MsgClickEvent = 33 - - MsgResourceEvent = 35 - - MsgCustomEvent = 36 - - MsgCSSInsertRule = 37 - - MsgCSSDeleteRule = 38 - - MsgFetch = 39 - - MsgProfiler = 40 - - MsgOTable = 41 - - MsgStateAction = 42 - - MsgStateActionEvent = 43 - - MsgRedux = 44 - - MsgVuex = 45 - - MsgMobX = 46 - - MsgNgRx = 47 - - MsgGraphQL = 48 - - MsgPerformanceTrack = 49 - - MsgGraphQLEvent = 50 - - MsgFetchEvent = 51 - - MsgDOMDrop = 52 - - MsgResourceTiming = 53 - - MsgConnectionInformation = 54 - - MsgSetPageVisibility = 55 - - MsgPerformanceTrackAggr = 56 - - MsgLoadFontFace = 57 - - MsgSetNodeFocus = 58 - - MsgLongTask = 59 - - MsgSetNodeAttributeURLBased = 60 - - MsgSetCSSDataURLBased = 61 - - MsgIssueEvent = 62 - - MsgTechnicalInfo = 63 - - 
MsgCustomIssue = 64 - - MsgAssetCache = 66 - - MsgCSSInsertRuleURLBased = 67 - - MsgMouseClick = 69 - - MsgCreateIFrameDocument = 70 - - MsgAdoptedSSReplaceURLBased = 71 - - MsgAdoptedSSReplace = 72 - + MsgBatchMeta = 80 + MsgBatchMetadata = 81 + MsgPartitionedMessage = 82 + MsgTimestamp = 0 + MsgSessionStart = 1 + MsgSessionEndDeprecated = 3 + MsgSetPageLocation = 4 + MsgSetViewportSize = 5 + MsgSetViewportScroll = 6 + MsgCreateDocument = 7 + MsgCreateElementNode = 8 + MsgCreateTextNode = 9 + MsgMoveNode = 10 + MsgRemoveNode = 11 + MsgSetNodeAttribute = 12 + MsgRemoveNodeAttribute = 13 + MsgSetNodeData = 14 + MsgSetCSSData = 15 + MsgSetNodeScroll = 16 + MsgSetInputTarget = 17 + MsgSetInputValue = 18 + MsgSetInputChecked = 19 + MsgMouseMove = 20 + MsgNetworkRequest = 21 + MsgConsoleLog = 22 + MsgPageLoadTiming = 23 + MsgPageRenderTiming = 24 + MsgJSExceptionDeprecated = 25 + MsgIntegrationEvent = 26 + MsgCustomEvent = 27 + MsgUserID = 28 + MsgUserAnonymousID = 29 + MsgMetadata = 30 + MsgPageEvent = 31 + MsgInputEvent = 32 + MsgClickEvent = 33 + MsgResourceEvent = 35 + MsgCSSInsertRule = 37 + MsgCSSDeleteRule = 38 + MsgFetch = 39 + MsgProfiler = 40 + MsgOTable = 41 + MsgStateAction = 42 + MsgRedux = 44 + MsgVuex = 45 + MsgMobX = 46 + MsgNgRx = 47 + MsgGraphQL = 48 + MsgPerformanceTrack = 49 + MsgDOMDrop = 52 + MsgResourceTiming = 53 + MsgConnectionInformation = 54 + MsgSetPageVisibility = 55 + MsgPerformanceTrackAggr = 56 + MsgLoadFontFace = 57 + MsgSetNodeFocus = 58 + MsgLongTask = 59 + MsgSetNodeAttributeURLBased = 60 + MsgSetCSSDataURLBased = 61 + MsgIssueEvent = 62 + MsgTechnicalInfo = 63 + MsgCustomIssue = 64 + MsgAssetCache = 66 + MsgCSSInsertRuleURLBased = 67 + MsgMouseClick = 69 + MsgCreateIFrameDocument = 70 + MsgAdoptedSSReplaceURLBased = 71 + MsgAdoptedSSReplace = 72 MsgAdoptedSSInsertRuleURLBased = 73 - - MsgAdoptedSSInsertRule = 74 - - MsgAdoptedSSDeleteRule = 75 - - MsgAdoptedSSAddOwner = 76 - - MsgAdoptedSSRemoveOwner = 77 - - MsgZustand = 79 - - 
MsgJSException = 78 - - MsgSessionEnd = 126 - - MsgSessionSearch = 127 - - MsgIOSBatchMeta = 107 - - MsgIOSSessionStart = 90 - - MsgIOSSessionEnd = 91 - - MsgIOSMetadata = 92 - - MsgIOSCustomEvent = 93 - - MsgIOSUserID = 94 - - MsgIOSUserAnonymousID = 95 - - MsgIOSScreenChanges = 96 - - MsgIOSCrash = 97 - - MsgIOSScreenEnter = 98 - - MsgIOSScreenLeave = 99 - - MsgIOSClickEvent = 100 - - MsgIOSInputEvent = 101 - - MsgIOSPerformanceEvent = 102 - - MsgIOSLog = 103 - - MsgIOSInternalError = 104 - - MsgIOSNetworkCall = 105 - - MsgIOSPerformanceAggregated = 110 - - MsgIOSIssueEvent = 111 + MsgAdoptedSSInsertRule = 74 + MsgAdoptedSSDeleteRule = 75 + MsgAdoptedSSAddOwner = 76 + MsgAdoptedSSRemoveOwner = 77 + MsgZustand = 79 + MsgJSException = 78 + MsgSessionEnd = 126 + MsgSessionSearch = 127 + MsgIOSBatchMeta = 107 + MsgIOSSessionStart = 90 + MsgIOSSessionEnd = 91 + MsgIOSMetadata = 92 + MsgIOSCustomEvent = 93 + MsgIOSUserID = 94 + MsgIOSUserAnonymousID = 95 + MsgIOSScreenChanges = 96 + MsgIOSCrash = 97 + MsgIOSScreenEnter = 98 + MsgIOSScreenLeave = 99 + MsgIOSClickEvent = 100 + MsgIOSInputEvent = 101 + MsgIOSPerformanceEvent = 102 + MsgIOSLog = 103 + MsgIOSInternalError = 104 + MsgIOSNetworkCall = 105 + MsgIOSPerformanceAggregated = 110 + MsgIOSIssueEvent = 111 ) type BatchMeta struct { @@ -220,17 +117,6 @@ func (msg *BatchMeta) Encode() []byte { return buf[:p] } -func (msg *BatchMeta) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *BatchMeta) Decode() Message { return msg } @@ -260,17 +146,6 @@ func (msg *BatchMetadata) Encode() []byte { return buf[:p] } -func (msg *BatchMetadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - 
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *BatchMetadata) Decode() Message { return msg } @@ -294,17 +169,6 @@ func (msg *PartitionedMessage) Encode() []byte { return buf[:p] } -func (msg *PartitionedMessage) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PartitionedMessage) Decode() Message { return msg } @@ -326,17 +190,6 @@ func (msg *Timestamp) Encode() []byte { return buf[:p] } -func (msg *Timestamp) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Timestamp) Decode() Message { return msg } @@ -388,17 +241,6 @@ func (msg *SessionStart) Encode() []byte { return buf[:p] } -func (msg *SessionStart) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SessionStart) Decode() Message { return msg } @@ -420,17 +262,6 @@ func (msg *SessionEndDeprecated) Encode() []byte { return buf[:p] } -func (msg *SessionEndDeprecated) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SessionEndDeprecated) Decode() Message { return msg } @@ -456,17 +287,6 @@ func (msg *SetPageLocation) Encode() []byte { return buf[:p] } -func (msg *SetPageLocation) EncodeWithIndex() []byte { - encoded := msg.Encode() - 
if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetPageLocation) Decode() Message { return msg } @@ -490,17 +310,6 @@ func (msg *SetViewportSize) Encode() []byte { return buf[:p] } -func (msg *SetViewportSize) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetViewportSize) Decode() Message { return msg } @@ -524,17 +333,6 @@ func (msg *SetViewportScroll) Encode() []byte { return buf[:p] } -func (msg *SetViewportScroll) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetViewportScroll) Decode() Message { return msg } @@ -555,17 +353,6 @@ func (msg *CreateDocument) Encode() []byte { return buf[:p] } -func (msg *CreateDocument) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CreateDocument) Decode() Message { return msg } @@ -595,17 +382,6 @@ func (msg *CreateElementNode) Encode() []byte { return buf[:p] } -func (msg *CreateElementNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CreateElementNode) Decode() Message { return msg } @@ -631,17 +407,6 @@ func (msg 
*CreateTextNode) Encode() []byte { return buf[:p] } -func (msg *CreateTextNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CreateTextNode) Decode() Message { return msg } @@ -667,17 +432,6 @@ func (msg *MoveNode) Encode() []byte { return buf[:p] } -func (msg *MoveNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *MoveNode) Decode() Message { return msg } @@ -699,17 +453,6 @@ func (msg *RemoveNode) Encode() []byte { return buf[:p] } -func (msg *RemoveNode) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *RemoveNode) Decode() Message { return msg } @@ -735,17 +478,6 @@ func (msg *SetNodeAttribute) Encode() []byte { return buf[:p] } -func (msg *SetNodeAttribute) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetNodeAttribute) Decode() Message { return msg } @@ -769,17 +501,6 @@ func (msg *RemoveNodeAttribute) Encode() []byte { return buf[:p] } -func (msg *RemoveNodeAttribute) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return 
data -} - func (msg *RemoveNodeAttribute) Decode() Message { return msg } @@ -803,17 +524,6 @@ func (msg *SetNodeData) Encode() []byte { return buf[:p] } -func (msg *SetNodeData) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetNodeData) Decode() Message { return msg } @@ -837,17 +547,6 @@ func (msg *SetCSSData) Encode() []byte { return buf[:p] } -func (msg *SetCSSData) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetCSSData) Decode() Message { return msg } @@ -873,17 +572,6 @@ func (msg *SetNodeScroll) Encode() []byte { return buf[:p] } -func (msg *SetNodeScroll) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetNodeScroll) Decode() Message { return msg } @@ -907,17 +595,6 @@ func (msg *SetInputTarget) Encode() []byte { return buf[:p] } -func (msg *SetInputTarget) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetInputTarget) Decode() Message { return msg } @@ -943,17 +620,6 @@ func (msg *SetInputValue) Encode() []byte { return buf[:p] } -func (msg *SetInputValue) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - 
copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetInputValue) Decode() Message { return msg } @@ -977,17 +643,6 @@ func (msg *SetInputChecked) Encode() []byte { return buf[:p] } -func (msg *SetInputChecked) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetInputChecked) Decode() Message { return msg } @@ -1011,17 +666,6 @@ func (msg *MouseMove) Encode() []byte { return buf[:p] } -func (msg *MouseMove) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *MouseMove) Decode() Message { return msg } @@ -1030,6 +674,41 @@ func (msg *MouseMove) TypeID() int { return 20 } +type NetworkRequest struct { + message + Type string + Method string + URL string + Request string + Response string + Status uint64 + Timestamp uint64 + Duration uint64 +} + +func (msg *NetworkRequest) Encode() []byte { + buf := make([]byte, 81+len(msg.Type)+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) + buf[0] = 21 + p := 1 + p = WriteString(msg.Type, buf, p) + p = WriteString(msg.Method, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Request, buf, p) + p = WriteString(msg.Response, buf, p) + p = WriteUint(msg.Status, buf, p) + p = WriteUint(msg.Timestamp, buf, p) + p = WriteUint(msg.Duration, buf, p) + return buf[:p] +} + +func (msg *NetworkRequest) Decode() Message { + return msg +} + +func (msg *NetworkRequest) TypeID() int { + return 21 +} + type ConsoleLog struct { message Level string @@ -1045,17 +724,6 @@ func (msg *ConsoleLog) Encode() []byte { return buf[:p] } -func 
(msg *ConsoleLog) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *ConsoleLog) Decode() Message { return msg } @@ -1093,17 +761,6 @@ func (msg *PageLoadTiming) Encode() []byte { return buf[:p] } -func (msg *PageLoadTiming) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PageLoadTiming) Decode() Message { return msg } @@ -1129,17 +786,6 @@ func (msg *PageRenderTiming) Encode() []byte { return buf[:p] } -func (msg *PageRenderTiming) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PageRenderTiming) Decode() Message { return msg } @@ -1165,17 +811,6 @@ func (msg *JSExceptionDeprecated) Encode() []byte { return buf[:p] } -func (msg *JSExceptionDeprecated) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *JSExceptionDeprecated) Decode() Message { return msg } @@ -1205,17 +840,6 @@ func (msg *IntegrationEvent) Encode() []byte { return buf[:p] } -func (msg *IntegrationEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg 
*IntegrationEvent) Decode() Message { return msg } @@ -1224,13 +848,13 @@ func (msg *IntegrationEvent) TypeID() int { return 26 } -type RawCustomEvent struct { +type CustomEvent struct { message Name string Payload string } -func (msg *RawCustomEvent) Encode() []byte { +func (msg *CustomEvent) Encode() []byte { buf := make([]byte, 21+len(msg.Name)+len(msg.Payload)) buf[0] = 27 p := 1 @@ -1239,22 +863,11 @@ func (msg *RawCustomEvent) Encode() []byte { return buf[:p] } -func (msg *RawCustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *RawCustomEvent) Decode() Message { +func (msg *CustomEvent) Decode() Message { return msg } -func (msg *RawCustomEvent) TypeID() int { +func (msg *CustomEvent) TypeID() int { return 27 } @@ -1271,17 +884,6 @@ func (msg *UserID) Encode() []byte { return buf[:p] } -func (msg *UserID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *UserID) Decode() Message { return msg } @@ -1303,17 +905,6 @@ func (msg *UserAnonymousID) Encode() []byte { return buf[:p] } -func (msg *UserAnonymousID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *UserAnonymousID) Decode() Message { return msg } @@ -1337,17 +928,6 @@ func (msg *Metadata) Encode() []byte { return buf[:p] } -func (msg *Metadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := 
make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Metadata) Decode() Message { return msg } @@ -1401,17 +981,6 @@ func (msg *PageEvent) Encode() []byte { return buf[:p] } -func (msg *PageEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PageEvent) Decode() Message { return msg } @@ -1441,17 +1010,6 @@ func (msg *InputEvent) Encode() []byte { return buf[:p] } -func (msg *InputEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *InputEvent) Decode() Message { return msg } @@ -1481,17 +1039,6 @@ func (msg *ClickEvent) Encode() []byte { return buf[:p] } -func (msg *ClickEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *ClickEvent) Decode() Message { return msg } @@ -1535,17 +1082,6 @@ func (msg *ResourceEvent) Encode() []byte { return buf[:p] } -func (msg *ResourceEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *ResourceEvent) Decode() Message { return msg } @@ -1554,44 +1090,6 @@ func (msg *ResourceEvent) TypeID() int { return 35 } -type CustomEvent struct { - message - MessageID uint64 - Timestamp uint64 - 
Name string - Payload string -} - -func (msg *CustomEvent) Encode() []byte { - buf := make([]byte, 41+len(msg.Name)+len(msg.Payload)) - buf[0] = 36 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Name, buf, p) - p = WriteString(msg.Payload, buf, p) - return buf[:p] -} - -func (msg *CustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *CustomEvent) Decode() Message { - return msg -} - -func (msg *CustomEvent) TypeID() int { - return 36 -} - type CSSInsertRule struct { message ID uint64 @@ -1609,17 +1107,6 @@ func (msg *CSSInsertRule) Encode() []byte { return buf[:p] } -func (msg *CSSInsertRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CSSInsertRule) Decode() Message { return msg } @@ -1643,17 +1130,6 @@ func (msg *CSSDeleteRule) Encode() []byte { return buf[:p] } -func (msg *CSSDeleteRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CSSDeleteRule) Decode() Message { return msg } @@ -1687,17 +1163,6 @@ func (msg *Fetch) Encode() []byte { return buf[:p] } -func (msg *Fetch) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Fetch) Decode() Message { 
return msg } @@ -1725,17 +1190,6 @@ func (msg *Profiler) Encode() []byte { return buf[:p] } -func (msg *Profiler) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Profiler) Decode() Message { return msg } @@ -1759,17 +1213,6 @@ func (msg *OTable) Encode() []byte { return buf[:p] } -func (msg *OTable) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *OTable) Decode() Message { return msg } @@ -1791,17 +1234,6 @@ func (msg *StateAction) Encode() []byte { return buf[:p] } -func (msg *StateAction) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *StateAction) Decode() Message { return msg } @@ -1810,42 +1242,6 @@ func (msg *StateAction) TypeID() int { return 42 } -type StateActionEvent struct { - message - MessageID uint64 - Timestamp uint64 - Type string -} - -func (msg *StateActionEvent) Encode() []byte { - buf := make([]byte, 31+len(msg.Type)) - buf[0] = 43 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Type, buf, p) - return buf[:p] -} - -func (msg *StateActionEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *StateActionEvent) Decode() Message { - return msg 
-} - -func (msg *StateActionEvent) TypeID() int { - return 43 -} - type Redux struct { message Action string @@ -1863,17 +1259,6 @@ func (msg *Redux) Encode() []byte { return buf[:p] } -func (msg *Redux) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Redux) Decode() Message { return msg } @@ -1897,17 +1282,6 @@ func (msg *Vuex) Encode() []byte { return buf[:p] } -func (msg *Vuex) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Vuex) Decode() Message { return msg } @@ -1931,17 +1305,6 @@ func (msg *MobX) Encode() []byte { return buf[:p] } -func (msg *MobX) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *MobX) Decode() Message { return msg } @@ -1967,17 +1330,6 @@ func (msg *NgRx) Encode() []byte { return buf[:p] } -func (msg *NgRx) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *NgRx) Decode() Message { return msg } @@ -2005,17 +1357,6 @@ func (msg *GraphQL) Encode() []byte { return buf[:p] } -func (msg *GraphQL) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - 
binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *GraphQL) Decode() Message { return msg } @@ -2043,17 +1384,6 @@ func (msg *PerformanceTrack) Encode() []byte { return buf[:p] } -func (msg *PerformanceTrack) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PerformanceTrack) Decode() Message { return msg } @@ -2062,94 +1392,6 @@ func (msg *PerformanceTrack) TypeID() int { return 49 } -type GraphQLEvent struct { - message - MessageID uint64 - Timestamp uint64 - OperationKind string - OperationName string - Variables string - Response string -} - -func (msg *GraphQLEvent) Encode() []byte { - buf := make([]byte, 61+len(msg.OperationKind)+len(msg.OperationName)+len(msg.Variables)+len(msg.Response)) - buf[0] = 50 - p := 1 - p = WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.OperationKind, buf, p) - p = WriteString(msg.OperationName, buf, p) - p = WriteString(msg.Variables, buf, p) - p = WriteString(msg.Response, buf, p) - return buf[:p] -} - -func (msg *GraphQLEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *GraphQLEvent) Decode() Message { - return msg -} - -func (msg *GraphQLEvent) TypeID() int { - return 50 -} - -type FetchEvent struct { - message - MessageID uint64 - Timestamp uint64 - Method string - URL string - Request string - Response string - Status uint64 - Duration uint64 -} - -func (msg *FetchEvent) Encode() []byte { - buf := make([]byte, 81+len(msg.Method)+len(msg.URL)+len(msg.Request)+len(msg.Response)) - buf[0] = 51 - p := 1 - p = 
WriteUint(msg.MessageID, buf, p) - p = WriteUint(msg.Timestamp, buf, p) - p = WriteString(msg.Method, buf, p) - p = WriteString(msg.URL, buf, p) - p = WriteString(msg.Request, buf, p) - p = WriteString(msg.Response, buf, p) - p = WriteUint(msg.Status, buf, p) - p = WriteUint(msg.Duration, buf, p) - return buf[:p] -} - -func (msg *FetchEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - -func (msg *FetchEvent) Decode() Message { - return msg -} - -func (msg *FetchEvent) TypeID() int { - return 51 -} - type DOMDrop struct { message Timestamp uint64 @@ -2163,17 +1405,6 @@ func (msg *DOMDrop) Encode() []byte { return buf[:p] } -func (msg *DOMDrop) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *DOMDrop) Decode() Message { return msg } @@ -2209,17 +1440,6 @@ func (msg *ResourceTiming) Encode() []byte { return buf[:p] } -func (msg *ResourceTiming) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *ResourceTiming) Decode() Message { return msg } @@ -2243,17 +1463,6 @@ func (msg *ConnectionInformation) Encode() []byte { return buf[:p] } -func (msg *ConnectionInformation) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *ConnectionInformation) 
Decode() Message { return msg } @@ -2275,17 +1484,6 @@ func (msg *SetPageVisibility) Encode() []byte { return buf[:p] } -func (msg *SetPageVisibility) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetPageVisibility) Decode() Message { return msg } @@ -2333,17 +1531,6 @@ func (msg *PerformanceTrackAggr) Encode() []byte { return buf[:p] } -func (msg *PerformanceTrackAggr) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *PerformanceTrackAggr) Decode() Message { return msg } @@ -2371,17 +1558,6 @@ func (msg *LoadFontFace) Encode() []byte { return buf[:p] } -func (msg *LoadFontFace) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *LoadFontFace) Decode() Message { return msg } @@ -2403,17 +1579,6 @@ func (msg *SetNodeFocus) Encode() []byte { return buf[:p] } -func (msg *SetNodeFocus) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetNodeFocus) Decode() Message { return msg } @@ -2447,17 +1612,6 @@ func (msg *LongTask) Encode() []byte { return buf[:p] } -func (msg *LongTask) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - 
copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *LongTask) Decode() Message { return msg } @@ -2485,17 +1639,6 @@ func (msg *SetNodeAttributeURLBased) Encode() []byte { return buf[:p] } -func (msg *SetNodeAttributeURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetNodeAttributeURLBased) Decode() Message { return msg } @@ -2521,17 +1664,6 @@ func (msg *SetCSSDataURLBased) Encode() []byte { return buf[:p] } -func (msg *SetCSSDataURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SetCSSDataURLBased) Decode() Message { return msg } @@ -2563,17 +1695,6 @@ func (msg *IssueEvent) Encode() []byte { return buf[:p] } -func (msg *IssueEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IssueEvent) Decode() Message { return msg } @@ -2597,17 +1718,6 @@ func (msg *TechnicalInfo) Encode() []byte { return buf[:p] } -func (msg *TechnicalInfo) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *TechnicalInfo) Decode() Message { return msg } @@ -2631,17 +1741,6 @@ func (msg *CustomIssue) Encode() []byte { return buf[:p] } -func (msg *CustomIssue) 
EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CustomIssue) Decode() Message { return msg } @@ -2663,17 +1762,6 @@ func (msg *AssetCache) Encode() []byte { return buf[:p] } -func (msg *AssetCache) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AssetCache) Decode() Message { return msg } @@ -2701,17 +1789,6 @@ func (msg *CSSInsertRuleURLBased) Encode() []byte { return buf[:p] } -func (msg *CSSInsertRuleURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CSSInsertRuleURLBased) Decode() Message { return msg } @@ -2739,17 +1816,6 @@ func (msg *MouseClick) Encode() []byte { return buf[:p] } -func (msg *MouseClick) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *MouseClick) Decode() Message { return msg } @@ -2773,17 +1839,6 @@ func (msg *CreateIFrameDocument) Encode() []byte { return buf[:p] } -func (msg *CreateIFrameDocument) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *CreateIFrameDocument) Decode() 
Message { return msg } @@ -2809,17 +1864,6 @@ func (msg *AdoptedSSReplaceURLBased) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSReplaceURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSReplaceURLBased) Decode() Message { return msg } @@ -2843,17 +1887,6 @@ func (msg *AdoptedSSReplace) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSReplace) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSReplace) Decode() Message { return msg } @@ -2881,17 +1914,6 @@ func (msg *AdoptedSSInsertRuleURLBased) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSInsertRuleURLBased) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSInsertRuleURLBased) Decode() Message { return msg } @@ -2917,17 +1939,6 @@ func (msg *AdoptedSSInsertRule) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSInsertRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSInsertRule) Decode() Message { return msg } @@ -2951,17 +1962,6 @@ func (msg *AdoptedSSDeleteRule) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSDeleteRule) EncodeWithIndex() []byte { - encoded := msg.Encode() - if 
IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSDeleteRule) Decode() Message { return msg } @@ -2985,17 +1985,6 @@ func (msg *AdoptedSSAddOwner) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSAddOwner) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSAddOwner) Decode() Message { return msg } @@ -3019,17 +2008,6 @@ func (msg *AdoptedSSRemoveOwner) Encode() []byte { return buf[:p] } -func (msg *AdoptedSSRemoveOwner) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *AdoptedSSRemoveOwner) Decode() Message { return msg } @@ -3053,17 +2031,6 @@ func (msg *Zustand) Encode() []byte { return buf[:p] } -func (msg *Zustand) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *Zustand) Decode() Message { return msg } @@ -3091,17 +2058,6 @@ func (msg *JSException) Encode() []byte { return buf[:p] } -func (msg *JSException) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *JSException) Decode() Message { return msg } @@ -3125,17 +2081,6 @@ func (msg *SessionEnd) 
Encode() []byte { return buf[:p] } -func (msg *SessionEnd) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SessionEnd) Decode() Message { return msg } @@ -3159,17 +2104,6 @@ func (msg *SessionSearch) Encode() []byte { return buf[:p] } -func (msg *SessionSearch) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *SessionSearch) Decode() Message { return msg } @@ -3195,17 +2129,6 @@ func (msg *IOSBatchMeta) Encode() []byte { return buf[:p] } -func (msg *IOSBatchMeta) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSBatchMeta) Decode() Message { return msg } @@ -3245,17 +2168,6 @@ func (msg *IOSSessionStart) Encode() []byte { return buf[:p] } -func (msg *IOSSessionStart) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSSessionStart) Decode() Message { return msg } @@ -3277,17 +2189,6 @@ func (msg *IOSSessionEnd) Encode() []byte { return buf[:p] } -func (msg *IOSSessionEnd) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func 
(msg *IOSSessionEnd) Decode() Message { return msg } @@ -3315,17 +2216,6 @@ func (msg *IOSMetadata) Encode() []byte { return buf[:p] } -func (msg *IOSMetadata) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSMetadata) Decode() Message { return msg } @@ -3353,17 +2243,6 @@ func (msg *IOSCustomEvent) Encode() []byte { return buf[:p] } -func (msg *IOSCustomEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSCustomEvent) Decode() Message { return msg } @@ -3389,17 +2268,6 @@ func (msg *IOSUserID) Encode() []byte { return buf[:p] } -func (msg *IOSUserID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSUserID) Decode() Message { return msg } @@ -3425,17 +2293,6 @@ func (msg *IOSUserAnonymousID) Encode() []byte { return buf[:p] } -func (msg *IOSUserAnonymousID) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSUserAnonymousID) Decode() Message { return msg } @@ -3467,17 +2324,6 @@ func (msg *IOSScreenChanges) Encode() []byte { return buf[:p] } -func (msg *IOSScreenChanges) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - 
copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSScreenChanges) Decode() Message { return msg } @@ -3507,17 +2353,6 @@ func (msg *IOSCrash) Encode() []byte { return buf[:p] } -func (msg *IOSCrash) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSCrash) Decode() Message { return msg } @@ -3545,17 +2380,6 @@ func (msg *IOSScreenEnter) Encode() []byte { return buf[:p] } -func (msg *IOSScreenEnter) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSScreenEnter) Decode() Message { return msg } @@ -3583,17 +2407,6 @@ func (msg *IOSScreenLeave) Encode() []byte { return buf[:p] } -func (msg *IOSScreenLeave) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSScreenLeave) Decode() Message { return msg } @@ -3623,17 +2436,6 @@ func (msg *IOSClickEvent) Encode() []byte { return buf[:p] } -func (msg *IOSClickEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSClickEvent) Decode() Message { return msg } @@ -3663,17 +2465,6 @@ func (msg *IOSInputEvent) Encode() []byte { return buf[:p] } -func (msg *IOSInputEvent) EncodeWithIndex() []byte { - encoded := 
msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSInputEvent) Decode() Message { return msg } @@ -3701,17 +2492,6 @@ func (msg *IOSPerformanceEvent) Encode() []byte { return buf[:p] } -func (msg *IOSPerformanceEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSPerformanceEvent) Decode() Message { return msg } @@ -3739,17 +2519,6 @@ func (msg *IOSLog) Encode() []byte { return buf[:p] } -func (msg *IOSLog) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSLog) Decode() Message { return msg } @@ -3775,17 +2544,6 @@ func (msg *IOSInternalError) Encode() []byte { return buf[:p] } -func (msg *IOSInternalError) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSInternalError) Decode() Message { return msg } @@ -3823,17 +2581,6 @@ func (msg *IOSNetworkCall) Encode() []byte { return buf[:p] } -func (msg *IOSNetworkCall) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSNetworkCall) Decode() Message { return msg } @@ -3881,17 +2628,6 @@ func (msg 
*IOSPerformanceAggregated) Encode() []byte { return buf[:p] } -func (msg *IOSPerformanceAggregated) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSPerformanceAggregated) Decode() Message { return msg } @@ -3921,17 +2657,6 @@ func (msg *IOSIssueEvent) Encode() []byte { return buf[:p] } -func (msg *IOSIssueEvent) EncodeWithIndex() []byte { - encoded := msg.Encode() - if IsIOSType(msg.TypeID()) { - return encoded - } - data := make([]byte, len(encoded)+8) - copy(data[8:], encoded[:]) - binary.LittleEndian.PutUint64(data[0:], msg.Meta().Index) - return data -} - func (msg *IOSIssueEvent) Decode() Message { return msg } diff --git a/backend/pkg/messages/primitives.go b/backend/pkg/messages/primitives.go index 1d3d2410b..3e47a3943 100644 --- a/backend/pkg/messages/primitives.go +++ b/backend/pkg/messages/primitives.go @@ -1,11 +1,9 @@ package messages import ( - "encoding/json" "errors" "fmt" "io" - "log" ) var ( @@ -21,19 +19,6 @@ func ReadByte(reader io.Reader) (byte, error) { return one[0], nil } -func ReadData(reader io.Reader) ([]byte, error) { - n, err := ReadUint(reader) - if err != nil { - return nil, err - } - p := make([]byte, n) - _, err = io.ReadFull(reader, p) - if err != nil { - return nil, err - } - return p, nil -} - func ReadUint(reader io.Reader) (uint64, error) { var x uint64 var s uint @@ -55,6 +40,16 @@ func ReadUint(reader io.Reader) (uint64, error) { } } +func WriteUint(v uint64, buf []byte, p int) int { + for v >= 0x80 { + buf[p] = byte(v) | 0x80 + v >>= 7 + p++ + } + buf[p] = byte(v) + return p + 1 +} + func ReadInt(reader io.Reader) (int64, error) { ux, err := ReadUint(reader) x := int64(ux >> 1) @@ -67,6 +62,14 @@ func ReadInt(reader io.Reader) (int64, error) { return x, err } +func WriteInt(v int64, buf []byte, p int) int { + uv 
:= uint64(v) << 1 + if v < 0 { + uv = ^uv + } + return WriteUint(uv, buf, p) +} + func ReadBoolean(reader io.Reader) (bool, error) { p := make([]byte, 1) _, err := io.ReadFull(reader, p) @@ -76,6 +79,15 @@ func ReadBoolean(reader io.Reader) (bool, error) { return p[0] == 1, nil } +func WriteBoolean(v bool, buf []byte, p int) int { + if v { + buf[p] = 1 + } else { + buf[p] = 0 + } + return p + 1 +} + func ReadString(reader io.Reader) (string, error) { l, err := ReadUint(reader) if err != nil { @@ -92,73 +104,11 @@ func ReadString(reader io.Reader) (string, error) { return string(buf), nil } -func ReadJson(reader io.Reader) (interface{}, error) { - jsonData, err := ReadData(reader) - if err != nil { - return nil, err - } - var v interface{} - if err = json.Unmarshal(jsonData, &v); err != nil { - return nil, err - } - return v, nil -} - -func WriteUint(v uint64, buf []byte, p int) int { - for v >= 0x80 { - buf[p] = byte(v) | 0x80 - v >>= 7 - p++ - } - buf[p] = byte(v) - return p + 1 -} - -func WriteInt(v int64, buf []byte, p int) int { - uv := uint64(v) << 1 - if v < 0 { - uv = ^uv - } - return WriteUint(uv, buf, p) -} - -func WriteBoolean(v bool, buf []byte, p int) int { - if v { - buf[p] = 1 - } else { - buf[p] = 0 - } - return p + 1 -} - func WriteString(str string, buf []byte, p int) int { p = WriteUint(uint64(len(str)), buf, p) return p + copy(buf[p:], str) } -func WriteData(data []byte, buf []byte, p int) int { - p = WriteUint(uint64(len(data)), buf, p) - return p + copy(buf[p:], data) -} - -func WriteJson(v interface{}, buf []byte, p int) int { - data, err := json.Marshal(v) - if err != nil { - log.Printf("JSON encoding error: %v", err) - return WriteString("null", buf, p) - } - return WriteData(data, buf, p) -} - -func WriteSize(size uint64, buf []byte, p int) { - var m uint64 = 255 - for i := 0; i < 3; i++ { - buf[p+i] = byte(size & m) - size = size >> 8 - } - fmt.Println(buf) -} - func ReadSize(reader io.Reader) (uint64, error) { n, err := 
io.ReadFull(reader, three) if err != nil { diff --git a/backend/pkg/messages/raw.go b/backend/pkg/messages/raw.go index dbc71f4e6..44f666c69 100644 --- a/backend/pkg/messages/raw.go +++ b/backend/pkg/messages/raw.go @@ -1,75 +1,23 @@ package messages import ( - "bytes" - "encoding/binary" - "io" "log" ) // RawMessage is a not decoded message type RawMessage struct { - tp uint64 - size uint64 - data []byte - reader *bytes.Reader - raw []byte - meta *message - encoded bool - skipped *bool - broken *bool + tp uint64 + data []byte + broken *bool + meta *message } func (m *RawMessage) Encode() []byte { - if m.encoded { - return m.data - } - // Try to avoid EOF error - if m.reader.Len() < int(m.size) { - return nil - } - // Get current batch position - currPos, err := m.reader.Seek(0, io.SeekCurrent) - if err != nil { - log.Printf("can't get current batch position: %s", err) - return nil - } - // "Move" message type - if currPos == 0 { - log.Printf("can't move message type, curr position = %d", currPos) - return nil - } - // Dirty hack to avoid extra memory allocation - m.raw[currPos-1] = uint8(m.tp) - m.data = m.raw[currPos-1 : currPos+int64(m.size)] - m.encoded = true return m.data } -func (m *RawMessage) EncodeWithIndex() []byte { - if !m.encoded { - if m.Encode() == nil { - *m.broken = true - return nil - } - } - if IsIOSType(int(m.tp)) { - return m.data - } - data := make([]byte, len(m.data)+8) - copy(data[8:], m.data[:]) - binary.LittleEndian.PutUint64(data[0:], m.Meta().Index) - return data -} - func (m *RawMessage) Decode() Message { - if !m.encoded { - if m.Encode() == nil { - *m.broken = true - return nil - } - } - msg, err := ReadMessage(m.tp, bytes.NewReader(m.data[1:])) + msg, err := ReadMessage(m.tp, NewBytesReader(m.data[1:])) if err != nil { log.Printf("decode err: %s", err) *m.broken = true diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index 5988ebe62..29dddb02f 100644 --- a/backend/pkg/messages/read-message.go 
+++ b/backend/pkg/messages/read-message.go @@ -3,2053 +3,1889 @@ package messages import ( "fmt" - "io" ) -func DecodeBatchMeta(reader io.Reader) (Message, error) { +func DecodeBatchMeta(reader BytesReader) (Message, error) { var err error = nil msg := &BatchMeta{} - if msg.PageNo, err = ReadUint(reader); err != nil { + if msg.PageNo, err = reader.ReadUint(); err != nil { return nil, err } - if msg.FirstIndex, err = ReadUint(reader); err != nil { + if msg.FirstIndex, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Timestamp, err = ReadInt(reader); err != nil { + if msg.Timestamp, err = reader.ReadInt(); err != nil { return nil, err } return msg, err } -func DecodeBatchMetadata(reader io.Reader) (Message, error) { +func DecodeBatchMetadata(reader BytesReader) (Message, error) { var err error = nil msg := &BatchMetadata{} - if msg.Version, err = ReadUint(reader); err != nil { + if msg.Version, err = reader.ReadUint(); err != nil { return nil, err } - if msg.PageNo, err = ReadUint(reader); err != nil { + if msg.PageNo, err = reader.ReadUint(); err != nil { return nil, err } - if msg.FirstIndex, err = ReadUint(reader); err != nil { + if msg.FirstIndex, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Timestamp, err = ReadInt(reader); err != nil { + if msg.Timestamp, err = reader.ReadInt(); err != nil { return nil, err } - if msg.Location, err = ReadString(reader); err != nil { + if msg.Location, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodePartitionedMessage(reader io.Reader) (Message, error) { +func DecodePartitionedMessage(reader BytesReader) (Message, error) { var err error = nil msg := &PartitionedMessage{} - if msg.PartNo, err = ReadUint(reader); err != nil { + if msg.PartNo, err = reader.ReadUint(); err != nil { return nil, err } - if msg.PartTotal, err = ReadUint(reader); err != nil { + if msg.PartTotal, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func 
DecodeTimestamp(reader io.Reader) (Message, error) { +func DecodeTimestamp(reader BytesReader) (Message, error) { var err error = nil msg := &Timestamp{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeSessionStart(reader io.Reader) (Message, error) { +func DecodeSessionStart(reader BytesReader) (Message, error) { var err error = nil msg := &SessionStart{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ProjectID, err = ReadUint(reader); err != nil { + if msg.ProjectID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TrackerVersion, err = ReadString(reader); err != nil { + if msg.TrackerVersion, err = reader.ReadString(); err != nil { return nil, err } - if msg.RevID, err = ReadString(reader); err != nil { + if msg.RevID, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserUUID, err = ReadString(reader); err != nil { + if msg.UserUUID, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserAgent, err = ReadString(reader); err != nil { + if msg.UserAgent, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserOS, err = ReadString(reader); err != nil { + if msg.UserOS, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserOSVersion, err = ReadString(reader); err != nil { + if msg.UserOSVersion, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserBrowser, err = ReadString(reader); err != nil { + if msg.UserBrowser, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserBrowserVersion, err = ReadString(reader); err != nil { + if msg.UserBrowserVersion, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserDevice, err = ReadString(reader); err != nil { + if msg.UserDevice, err = reader.ReadString(); err != nil { return nil, err } 
- if msg.UserDeviceType, err = ReadString(reader); err != nil { + if msg.UserDeviceType, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserDeviceMemorySize, err = ReadUint(reader); err != nil { + if msg.UserDeviceMemorySize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.UserDeviceHeapSize, err = ReadUint(reader); err != nil { + if msg.UserDeviceHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.UserCountry, err = ReadString(reader); err != nil { + if msg.UserCountry, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserID, err = ReadString(reader); err != nil { + if msg.UserID, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSessionEndDeprecated(reader io.Reader) (Message, error) { +func DecodeSessionEndDeprecated(reader BytesReader) (Message, error) { var err error = nil msg := &SessionEndDeprecated{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeSetPageLocation(reader io.Reader) (Message, error) { +func DecodeSetPageLocation(reader BytesReader) (Message, error) { var err error = nil msg := &SetPageLocation{} - if msg.URL, err = ReadString(reader); err != nil { + if msg.URL, err = reader.ReadString(); err != nil { return nil, err } - if msg.Referrer, err = ReadString(reader); err != nil { + if msg.Referrer, err = reader.ReadString(); err != nil { return nil, err } - if msg.NavigationStart, err = ReadUint(reader); err != nil { + if msg.NavigationStart, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeSetViewportSize(reader io.Reader) (Message, error) { +func DecodeSetViewportSize(reader BytesReader) (Message, error) { var err error = nil msg := &SetViewportSize{} - if msg.Width, err = ReadUint(reader); err != nil { + if msg.Width, err = reader.ReadUint(); err != nil { return nil, err } - if 
msg.Height, err = ReadUint(reader); err != nil { + if msg.Height, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeSetViewportScroll(reader io.Reader) (Message, error) { +func DecodeSetViewportScroll(reader BytesReader) (Message, error) { var err error = nil msg := &SetViewportScroll{} - if msg.X, err = ReadInt(reader); err != nil { + if msg.X, err = reader.ReadInt(); err != nil { return nil, err } - if msg.Y, err = ReadInt(reader); err != nil { + if msg.Y, err = reader.ReadInt(); err != nil { return nil, err } return msg, err } -func DecodeCreateDocument(reader io.Reader) (Message, error) { +func DecodeCreateDocument(reader BytesReader) (Message, error) { var err error = nil msg := &CreateDocument{} return msg, err } -func DecodeCreateElementNode(reader io.Reader) (Message, error) { +func DecodeCreateElementNode(reader BytesReader) (Message, error) { var err error = nil msg := &CreateElementNode{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ParentID, err = ReadUint(reader); err != nil { + if msg.ParentID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.index, err = ReadUint(reader); err != nil { + if msg.index, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Tag, err = ReadString(reader); err != nil { + if msg.Tag, err = reader.ReadString(); err != nil { return nil, err } - if msg.SVG, err = ReadBoolean(reader); err != nil { + if msg.SVG, err = reader.ReadBoolean(); err != nil { return nil, err } return msg, err } -func DecodeCreateTextNode(reader io.Reader) (Message, error) { +func DecodeCreateTextNode(reader BytesReader) (Message, error) { var err error = nil msg := &CreateTextNode{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ParentID, err = ReadUint(reader); err != nil { + if msg.ParentID, err = reader.ReadUint(); err != nil 
{ return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeMoveNode(reader io.Reader) (Message, error) { +func DecodeMoveNode(reader BytesReader) (Message, error) { var err error = nil msg := &MoveNode{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ParentID, err = ReadUint(reader); err != nil { + if msg.ParentID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeRemoveNode(reader io.Reader) (Message, error) { +func DecodeRemoveNode(reader BytesReader) (Message, error) { var err error = nil msg := &RemoveNode{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeSetNodeAttribute(reader io.Reader) (Message, error) { +func DecodeSetNodeAttribute(reader BytesReader) (Message, error) { var err error = nil msg := &SetNodeAttribute{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeRemoveNodeAttribute(reader io.Reader) (Message, error) { +func DecodeRemoveNodeAttribute(reader BytesReader) (Message, error) { var err error = nil msg := &RemoveNodeAttribute{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != 
nil { return nil, err } return msg, err } -func DecodeSetNodeData(reader io.Reader) (Message, error) { +func DecodeSetNodeData(reader BytesReader) (Message, error) { var err error = nil msg := &SetNodeData{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Data, err = ReadString(reader); err != nil { + if msg.Data, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetCSSData(reader io.Reader) (Message, error) { +func DecodeSetCSSData(reader BytesReader) (Message, error) { var err error = nil msg := &SetCSSData{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Data, err = ReadString(reader); err != nil { + if msg.Data, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetNodeScroll(reader io.Reader) (Message, error) { +func DecodeSetNodeScroll(reader BytesReader) (Message, error) { var err error = nil msg := &SetNodeScroll{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.X, err = ReadInt(reader); err != nil { + if msg.X, err = reader.ReadInt(); err != nil { return nil, err } - if msg.Y, err = ReadInt(reader); err != nil { + if msg.Y, err = reader.ReadInt(); err != nil { return nil, err } return msg, err } -func DecodeSetInputTarget(reader io.Reader) (Message, error) { +func DecodeSetInputTarget(reader BytesReader) (Message, error) { var err error = nil msg := &SetInputTarget{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Label, err = ReadString(reader); err != nil { + if msg.Label, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetInputValue(reader io.Reader) (Message, error) { +func DecodeSetInputValue(reader BytesReader) (Message, 
error) { var err error = nil msg := &SetInputValue{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } - if msg.Mask, err = ReadInt(reader); err != nil { + if msg.Mask, err = reader.ReadInt(); err != nil { return nil, err } return msg, err } -func DecodeSetInputChecked(reader io.Reader) (Message, error) { +func DecodeSetInputChecked(reader BytesReader) (Message, error) { var err error = nil msg := &SetInputChecked{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Checked, err = ReadBoolean(reader); err != nil { + if msg.Checked, err = reader.ReadBoolean(); err != nil { return nil, err } return msg, err } -func DecodeMouseMove(reader io.Reader) (Message, error) { +func DecodeMouseMove(reader BytesReader) (Message, error) { var err error = nil msg := &MouseMove{} - if msg.X, err = ReadUint(reader); err != nil { + if msg.X, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Y, err = ReadUint(reader); err != nil { + if msg.Y, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeConsoleLog(reader io.Reader) (Message, error) { +func DecodeNetworkRequest(reader BytesReader) (Message, error) { + var err error = nil + msg := &NetworkRequest{} + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Method, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Request, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Response, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Status, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Timestamp, err = reader.ReadUint(); err != 
nil { + return nil, err + } + if msg.Duration, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeConsoleLog(reader BytesReader) (Message, error) { var err error = nil msg := &ConsoleLog{} - if msg.Level, err = ReadString(reader); err != nil { + if msg.Level, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodePageLoadTiming(reader io.Reader) (Message, error) { +func DecodePageLoadTiming(reader BytesReader) (Message, error) { var err error = nil msg := &PageLoadTiming{} - if msg.RequestStart, err = ReadUint(reader); err != nil { + if msg.RequestStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ResponseStart, err = ReadUint(reader); err != nil { + if msg.ResponseStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { + if msg.ResponseEnd, err = reader.ReadUint(); err != nil { return nil, err } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { + if msg.DomContentLoadedEventStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err != nil { + if msg.DomContentLoadedEventEnd, err = reader.ReadUint(); err != nil { return nil, err } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { + if msg.LoadEventStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { + if msg.LoadEventEnd, err = reader.ReadUint(); err != nil { return nil, err } - if msg.FirstPaint, err = ReadUint(reader); err != nil { + if msg.FirstPaint, err = reader.ReadUint(); err != nil { return nil, err } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { + if msg.FirstContentfulPaint, err = reader.ReadUint(); err != nil { return nil, 
err } return msg, err } -func DecodePageRenderTiming(reader io.Reader) (Message, error) { +func DecodePageRenderTiming(reader BytesReader) (Message, error) { var err error = nil msg := &PageRenderTiming{} - if msg.SpeedIndex, err = ReadUint(reader); err != nil { + if msg.SpeedIndex, err = reader.ReadUint(); err != nil { return nil, err } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { + if msg.VisuallyComplete, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { + if msg.TimeToInteractive, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeJSExceptionDeprecated(reader io.Reader) (Message, error) { +func DecodeJSExceptionDeprecated(reader BytesReader) (Message, error) { var err error = nil msg := &JSExceptionDeprecated{} - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Message, err = ReadString(reader); err != nil { + if msg.Message, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIntegrationEvent(reader io.Reader) (Message, error) { +func DecodeIntegrationEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IntegrationEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Source, err = ReadString(reader); err != nil { + if msg.Source, err = reader.ReadString(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Message, err = ReadString(reader); err != nil { + if msg.Message, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = 
ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeRawCustomEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &RawCustomEvent{} - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeUserID(reader io.Reader) (Message, error) { - var err error = nil - msg := &UserID{} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeUserAnonymousID(reader io.Reader) (Message, error) { - var err error = nil - msg := &UserAnonymousID{} - if msg.ID, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeMetadata(reader io.Reader) (Message, error) { - var err error = nil - msg := &Metadata{} - if msg.Key, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodePageEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &PageEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Referrer, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Loaded, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.RequestStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.ResponseEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DomContentLoadedEventEnd, err = ReadUint(reader); err 
!= nil { - return nil, err - } - if msg.LoadEventStart, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.LoadEventEnd, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.FirstContentfulPaint, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.SpeedIndex, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.VisuallyComplete, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.TimeToInteractive, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeInputEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &InputEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Value, err = ReadString(reader); err != nil { - return nil, err - } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeClickEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &ClickEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HesitationTime, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Label, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Selector, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeResourceEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &ResourceEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = 
ReadUint(reader); err != nil { - return nil, err - } - if msg.TTFB, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.HeaderSize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Success, err = ReadBoolean(reader); err != nil { - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeCustomEvent(reader io.Reader) (Message, error) { +func DecodeCustomEvent(reader BytesReader) (Message, error) { var err error = nil msg := &CustomEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Name, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeCSSInsertRule(reader io.Reader) (Message, error) { +func DecodeUserID(reader BytesReader) (Message, error) { + var err error = nil + msg := &UserID{} + if msg.ID, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeUserAnonymousID(reader BytesReader) (Message, error) { + var err error = nil + msg := &UserAnonymousID{} + if msg.ID, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeMetadata(reader BytesReader) (Message, error) { + var err error = nil + msg := &Metadata{} + if msg.Key, err = 
reader.ReadString(); err != nil { + return nil, err + } + if msg.Value, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err +} + +func DecodePageEvent(reader BytesReader) (Message, error) { + var err error = nil + msg := &PageEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Referrer, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Loaded, err = reader.ReadBoolean(); err != nil { + return nil, err + } + if msg.RequestStart, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.ResponseStart, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.ResponseEnd, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.DomContentLoadedEventStart, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.DomContentLoadedEventEnd, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.LoadEventStart, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.LoadEventEnd, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.FirstPaint, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.FirstContentfulPaint, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.SpeedIndex, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.VisuallyComplete, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.TimeToInteractive, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeInputEvent(reader BytesReader) (Message, error) { + var err error = nil + msg := &InputEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + if 
msg.Value, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.ValueMasked, err = reader.ReadBoolean(); err != nil { + return nil, err + } + if msg.Label, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeClickEvent(reader BytesReader) (Message, error) { + var err error = nil + msg := &ClickEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.HesitationTime, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Label, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Selector, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeResourceEvent(reader BytesReader) (Message, error) { + var err error = nil + msg := &ResourceEvent{} + if msg.MessageID, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Duration, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.TTFB, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.HeaderSize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Type, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Success, err = reader.ReadBoolean(); err != nil { + return nil, err + } + if msg.Method, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Status, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeCSSInsertRule(reader BytesReader) (Message, error) { var err error = nil msg := &CSSInsertRule{} - if 
msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Rule, err = ReadString(reader); err != nil { + if msg.Rule, err = reader.ReadString(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeCSSDeleteRule(reader io.Reader) (Message, error) { +func DecodeCSSDeleteRule(reader BytesReader) (Message, error) { var err error = nil msg := &CSSDeleteRule{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeFetch(reader io.Reader) (Message, error) { +func DecodeFetch(reader BytesReader) (Message, error) { var err error = nil msg := &Fetch{} - if msg.Method, err = ReadString(reader); err != nil { + if msg.Method, err = reader.ReadString(); err != nil { return nil, err } - if msg.URL, err = ReadString(reader); err != nil { + if msg.URL, err = reader.ReadString(); err != nil { return nil, err } - if msg.Request, err = ReadString(reader); err != nil { + if msg.Request, err = reader.ReadString(); err != nil { return nil, err } - if msg.Response, err = ReadString(reader); err != nil { + if msg.Response, err = reader.ReadString(); err != nil { return nil, err } - if msg.Status, err = ReadUint(reader); err != nil { + if msg.Status, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeProfiler(reader io.Reader) (Message, error) { +func DecodeProfiler(reader BytesReader) 
(Message, error) { var err error = nil msg := &Profiler{} - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Args, err = ReadString(reader); err != nil { + if msg.Args, err = reader.ReadString(); err != nil { return nil, err } - if msg.Result, err = ReadString(reader); err != nil { + if msg.Result, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeOTable(reader io.Reader) (Message, error) { +func DecodeOTable(reader BytesReader) (Message, error) { var err error = nil msg := &OTable{} - if msg.Key, err = ReadString(reader); err != nil { + if msg.Key, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeStateAction(reader io.Reader) (Message, error) { +func DecodeStateAction(reader BytesReader) (Message, error) { var err error = nil msg := &StateAction{} - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeStateActionEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &StateActionEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Type, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeRedux(reader io.Reader) (Message, error) { +func DecodeRedux(reader BytesReader) (Message, error) { var err error = nil msg := &Redux{} - if msg.Action, err = ReadString(reader); err != nil { + if msg.Action, err = reader.ReadString(); err != nil { return nil, err } - if msg.State, err 
= ReadString(reader); err != nil { + if msg.State, err = reader.ReadString(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeVuex(reader io.Reader) (Message, error) { +func DecodeVuex(reader BytesReader) (Message, error) { var err error = nil msg := &Vuex{} - if msg.Mutation, err = ReadString(reader); err != nil { + if msg.Mutation, err = reader.ReadString(); err != nil { return nil, err } - if msg.State, err = ReadString(reader); err != nil { + if msg.State, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeMobX(reader io.Reader) (Message, error) { +func DecodeMobX(reader BytesReader) (Message, error) { var err error = nil msg := &MobX{} - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeNgRx(reader io.Reader) (Message, error) { +func DecodeNgRx(reader BytesReader) (Message, error) { var err error = nil msg := &NgRx{} - if msg.Action, err = ReadString(reader); err != nil { + if msg.Action, err = reader.ReadString(); err != nil { return nil, err } - if msg.State, err = ReadString(reader); err != nil { + if msg.State, err = reader.ReadString(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeGraphQL(reader io.Reader) (Message, error) { +func DecodeGraphQL(reader BytesReader) (Message, error) { var err error = nil msg := &GraphQL{} - if msg.OperationKind, err = ReadString(reader); err != nil { + if msg.OperationKind, err = reader.ReadString(); err != nil { return nil, err } - if msg.OperationName, err = 
ReadString(reader); err != nil { + if msg.OperationName, err = reader.ReadString(); err != nil { return nil, err } - if msg.Variables, err = ReadString(reader); err != nil { + if msg.Variables, err = reader.ReadString(); err != nil { return nil, err } - if msg.Response, err = ReadString(reader); err != nil { + if msg.Response, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodePerformanceTrack(reader io.Reader) (Message, error) { +func DecodePerformanceTrack(reader BytesReader) (Message, error) { var err error = nil msg := &PerformanceTrack{} - if msg.Frames, err = ReadInt(reader); err != nil { + if msg.Frames, err = reader.ReadInt(); err != nil { return nil, err } - if msg.Ticks, err = ReadInt(reader); err != nil { + if msg.Ticks, err = reader.ReadInt(); err != nil { return nil, err } - if msg.TotalJSHeapSize, err = ReadUint(reader); err != nil { + if msg.TotalJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.UsedJSHeapSize, err = ReadUint(reader); err != nil { + if msg.UsedJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeGraphQLEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &GraphQLEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.OperationKind, err = ReadString(reader); err != nil { - return nil, err - } - if msg.OperationName, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Variables, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeFetchEvent(reader io.Reader) (Message, error) { - var err error = nil - msg := &FetchEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Timestamp, err = ReadUint(reader); err != nil 
{ - return nil, err - } - if msg.Method, err = ReadString(reader); err != nil { - return nil, err - } - if msg.URL, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Request, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Response, err = ReadString(reader); err != nil { - return nil, err - } - if msg.Status, err = ReadUint(reader); err != nil { - return nil, err - } - if msg.Duration, err = ReadUint(reader); err != nil { - return nil, err - } - return msg, err -} - -func DecodeDOMDrop(reader io.Reader) (Message, error) { +func DecodeDOMDrop(reader BytesReader) (Message, error) { var err error = nil msg := &DOMDrop{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeResourceTiming(reader io.Reader) (Message, error) { +func DecodeResourceTiming(reader BytesReader) (Message, error) { var err error = nil msg := &ResourceTiming{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TTFB, err = ReadUint(reader); err != nil { + if msg.TTFB, err = reader.ReadUint(); err != nil { return nil, err } - if msg.HeaderSize, err = ReadUint(reader); err != nil { + if msg.HeaderSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.EncodedBodySize, err = ReadUint(reader); err != nil { + if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.DecodedBodySize, err = ReadUint(reader); err != nil { + if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.URL, err = ReadString(reader); err != nil { + if msg.URL, err = reader.ReadString(); err != nil { return nil, err } - if msg.Initiator, err = ReadString(reader); err != nil { + if 
msg.Initiator, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeConnectionInformation(reader io.Reader) (Message, error) { +func DecodeConnectionInformation(reader BytesReader) (Message, error) { var err error = nil msg := &ConnectionInformation{} - if msg.Downlink, err = ReadUint(reader); err != nil { + if msg.Downlink, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetPageVisibility(reader io.Reader) (Message, error) { +func DecodeSetPageVisibility(reader BytesReader) (Message, error) { var err error = nil msg := &SetPageVisibility{} - if msg.hidden, err = ReadBoolean(reader); err != nil { + if msg.hidden, err = reader.ReadBoolean(); err != nil { return nil, err } return msg, err } -func DecodePerformanceTrackAggr(reader io.Reader) (Message, error) { +func DecodePerformanceTrackAggr(reader BytesReader) (Message, error) { var err error = nil msg := &PerformanceTrackAggr{} - if msg.TimestampStart, err = ReadUint(reader); err != nil { + if msg.TimestampStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { + if msg.TimestampEnd, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinFPS, err = ReadUint(reader); err != nil { + if msg.MinFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgFPS, err = ReadUint(reader); err != nil { + if msg.AvgFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxFPS, err = ReadUint(reader); err != nil { + if msg.MaxFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinCPU, err = ReadUint(reader); err != nil { + if msg.MinCPU, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgCPU, err = ReadUint(reader); err != nil { + if msg.AvgCPU, err = reader.ReadUint(); err != nil { return 
nil, err } - if msg.MaxCPU, err = ReadUint(reader); err != nil { + if msg.MaxCPU, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinTotalJSHeapSize, err = ReadUint(reader); err != nil { + if msg.MinTotalJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgTotalJSHeapSize, err = ReadUint(reader); err != nil { + if msg.AvgTotalJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxTotalJSHeapSize, err = ReadUint(reader); err != nil { + if msg.MaxTotalJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinUsedJSHeapSize, err = ReadUint(reader); err != nil { + if msg.MinUsedJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgUsedJSHeapSize, err = ReadUint(reader); err != nil { + if msg.AvgUsedJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxUsedJSHeapSize, err = ReadUint(reader); err != nil { + if msg.MaxUsedJSHeapSize, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeLoadFontFace(reader io.Reader) (Message, error) { +func DecodeLoadFontFace(reader BytesReader) (Message, error) { var err error = nil msg := &LoadFontFace{} - if msg.ParentID, err = ReadUint(reader); err != nil { + if msg.ParentID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Family, err = ReadString(reader); err != nil { + if msg.Family, err = reader.ReadString(); err != nil { return nil, err } - if msg.Source, err = ReadString(reader); err != nil { + if msg.Source, err = reader.ReadString(); err != nil { return nil, err } - if msg.Descriptors, err = ReadString(reader); err != nil { + if msg.Descriptors, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetNodeFocus(reader io.Reader) (Message, error) { +func DecodeSetNodeFocus(reader BytesReader) (Message, error) { var err error = nil msg := &SetNodeFocus{} - if msg.ID, err = ReadInt(reader); err != nil { + if 
msg.ID, err = reader.ReadInt(); err != nil { return nil, err } return msg, err } -func DecodeLongTask(reader io.Reader) (Message, error) { +func DecodeLongTask(reader BytesReader) (Message, error) { var err error = nil msg := &LongTask{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Context, err = ReadUint(reader); err != nil { + if msg.Context, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ContainerType, err = ReadUint(reader); err != nil { + if msg.ContainerType, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ContainerSrc, err = ReadString(reader); err != nil { + if msg.ContainerSrc, err = reader.ReadString(); err != nil { return nil, err } - if msg.ContainerId, err = ReadString(reader); err != nil { + if msg.ContainerId, err = reader.ReadString(); err != nil { return nil, err } - if msg.ContainerName, err = ReadString(reader); err != nil { + if msg.ContainerName, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetNodeAttributeURLBased(reader io.Reader) (Message, error) { +func DecodeSetNodeAttributeURLBased(reader BytesReader) (Message, error) { var err error = nil msg := &SetNodeAttributeURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } - if msg.BaseURL, err = ReadString(reader); err != nil { + if msg.BaseURL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSetCSSDataURLBased(reader io.Reader) 
(Message, error) { +func DecodeSetCSSDataURLBased(reader BytesReader) (Message, error) { var err error = nil msg := &SetCSSDataURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Data, err = ReadString(reader); err != nil { + if msg.Data, err = reader.ReadString(); err != nil { return nil, err } - if msg.BaseURL, err = ReadString(reader); err != nil { + if msg.BaseURL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIssueEvent(reader io.Reader) (Message, error) { +func DecodeIssueEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IssueEvent{} - if msg.MessageID, err = ReadUint(reader); err != nil { + if msg.MessageID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } - if msg.ContextString, err = ReadString(reader); err != nil { + if msg.ContextString, err = reader.ReadString(); err != nil { return nil, err } - if msg.Context, err = ReadString(reader); err != nil { + if msg.Context, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeTechnicalInfo(reader io.Reader) (Message, error) { +func DecodeTechnicalInfo(reader BytesReader) (Message, error) { var err error = nil msg := &TechnicalInfo{} - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeCustomIssue(reader 
io.Reader) (Message, error) { +func DecodeCustomIssue(reader BytesReader) (Message, error) { var err error = nil msg := &CustomIssue{} - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeAssetCache(reader io.Reader) (Message, error) { +func DecodeAssetCache(reader BytesReader) (Message, error) { var err error = nil msg := &AssetCache{} - if msg.URL, err = ReadString(reader); err != nil { + if msg.URL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeCSSInsertRuleURLBased(reader io.Reader) (Message, error) { +func DecodeCSSInsertRuleURLBased(reader BytesReader) (Message, error) { var err error = nil msg := &CSSInsertRuleURLBased{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Rule, err = ReadString(reader); err != nil { + if msg.Rule, err = reader.ReadString(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } - if msg.BaseURL, err = ReadString(reader); err != nil { + if msg.BaseURL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeMouseClick(reader io.Reader) (Message, error) { +func DecodeMouseClick(reader BytesReader) (Message, error) { var err error = nil msg := &MouseClick{} - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.HesitationTime, err = ReadUint(reader); err != nil { + if msg.HesitationTime, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Label, err = ReadString(reader); err != nil { + if msg.Label, err = reader.ReadString(); err != nil { return nil, err } - if 
msg.Selector, err = ReadString(reader); err != nil { + if msg.Selector, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeCreateIFrameDocument(reader io.Reader) (Message, error) { +func DecodeCreateIFrameDocument(reader BytesReader) (Message, error) { var err error = nil msg := &CreateIFrameDocument{} - if msg.FrameID, err = ReadUint(reader); err != nil { + if msg.FrameID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSReplaceURLBased(reader io.Reader) (Message, error) { +func DecodeAdoptedSSReplaceURLBased(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSReplaceURLBased{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Text, err = ReadString(reader); err != nil { + if msg.Text, err = reader.ReadString(); err != nil { return nil, err } - if msg.BaseURL, err = ReadString(reader); err != nil { + if msg.BaseURL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSReplace(reader io.Reader) (Message, error) { +func DecodeAdoptedSSReplace(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSReplace{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Text, err = ReadString(reader); err != nil { + if msg.Text, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSInsertRuleURLBased(reader io.Reader) (Message, error) { +func DecodeAdoptedSSInsertRuleURLBased(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSInsertRuleURLBased{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { 
return nil, err } - if msg.Rule, err = ReadString(reader); err != nil { + if msg.Rule, err = reader.ReadString(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } - if msg.BaseURL, err = ReadString(reader); err != nil { + if msg.BaseURL, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSInsertRule(reader io.Reader) (Message, error) { +func DecodeAdoptedSSInsertRule(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSInsertRule{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Rule, err = ReadString(reader); err != nil { + if msg.Rule, err = reader.ReadString(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSDeleteRule(reader io.Reader) (Message, error) { +func DecodeAdoptedSSDeleteRule(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSDeleteRule{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Index, err = ReadUint(reader); err != nil { + if msg.Index, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSAddOwner(reader io.Reader) (Message, error) { +func DecodeAdoptedSSAddOwner(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSAddOwner{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeAdoptedSSRemoveOwner(reader io.Reader) (Message, error) { +func 
DecodeAdoptedSSRemoveOwner(reader BytesReader) (Message, error) { var err error = nil msg := &AdoptedSSRemoveOwner{} - if msg.SheetID, err = ReadUint(reader); err != nil { + if msg.SheetID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ID, err = ReadUint(reader); err != nil { + if msg.ID, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeZustand(reader io.Reader) (Message, error) { +func DecodeZustand(reader BytesReader) (Message, error) { var err error = nil msg := &Zustand{} - if msg.Mutation, err = ReadString(reader); err != nil { + if msg.Mutation, err = reader.ReadString(); err != nil { return nil, err } - if msg.State, err = ReadString(reader); err != nil { + if msg.State, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeJSException(reader io.Reader) (Message, error) { +func DecodeJSException(reader BytesReader) (Message, error) { var err error = nil msg := &JSException{} - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Message, err = ReadString(reader); err != nil { + if msg.Message, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } - if msg.Metadata, err = ReadString(reader); err != nil { + if msg.Metadata, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeSessionEnd(reader io.Reader) (Message, error) { +func DecodeSessionEnd(reader BytesReader) (Message, error) { var err error = nil msg := &SessionEnd{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.EncryptionKey, err = ReadString(reader); err != nil { + if msg.EncryptionKey, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func 
DecodeSessionSearch(reader io.Reader) (Message, error) { +func DecodeSessionSearch(reader BytesReader) (Message, error) { var err error = nil msg := &SessionSearch{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Partition, err = ReadUint(reader); err != nil { + if msg.Partition, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSBatchMeta(reader io.Reader) (Message, error) { +func DecodeIOSBatchMeta(reader BytesReader) (Message, error) { var err error = nil msg := &IOSBatchMeta{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.FirstIndex, err = ReadUint(reader); err != nil { + if msg.FirstIndex, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSSessionStart(reader io.Reader) (Message, error) { +func DecodeIOSSessionStart(reader BytesReader) (Message, error) { var err error = nil msg := &IOSSessionStart{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.ProjectID, err = ReadUint(reader); err != nil { + if msg.ProjectID, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TrackerVersion, err = ReadString(reader); err != nil { + if msg.TrackerVersion, err = reader.ReadString(); err != nil { return nil, err } - if msg.RevID, err = ReadString(reader); err != nil { + if msg.RevID, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserUUID, err = ReadString(reader); err != nil { + if msg.UserUUID, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserOS, err = ReadString(reader); err != nil { + if msg.UserOS, err = reader.ReadString(); err != nil { 
return nil, err } - if msg.UserOSVersion, err = ReadString(reader); err != nil { + if msg.UserOSVersion, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserDevice, err = ReadString(reader); err != nil { + if msg.UserDevice, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserDeviceType, err = ReadString(reader); err != nil { + if msg.UserDeviceType, err = reader.ReadString(); err != nil { return nil, err } - if msg.UserCountry, err = ReadString(reader); err != nil { + if msg.UserCountry, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSSessionEnd(reader io.Reader) (Message, error) { +func DecodeIOSSessionEnd(reader BytesReader) (Message, error) { var err error = nil msg := &IOSSessionEnd{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSMetadata(reader io.Reader) (Message, error) { +func DecodeIOSMetadata(reader BytesReader) (Message, error) { var err error = nil msg := &IOSMetadata{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Key, err = ReadString(reader); err != nil { + if msg.Key, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSCustomEvent(reader io.Reader) (Message, error) { +func DecodeIOSCustomEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IOSCustomEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if 
msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSUserID(reader io.Reader) (Message, error) { +func DecodeIOSUserID(reader BytesReader) (Message, error) { var err error = nil msg := &IOSUserID{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSUserAnonymousID(reader io.Reader) (Message, error) { +func DecodeIOSUserAnonymousID(reader BytesReader) (Message, error) { var err error = nil msg := &IOSUserAnonymousID{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Value, err = ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSScreenChanges(reader io.Reader) (Message, error) { +func DecodeIOSScreenChanges(reader BytesReader) (Message, error) { var err error = nil msg := &IOSScreenChanges{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.X, err = ReadUint(reader); err != nil { + 
if msg.X, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Y, err = ReadUint(reader); err != nil { + if msg.Y, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Width, err = ReadUint(reader); err != nil { + if msg.Width, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Height, err = ReadUint(reader); err != nil { + if msg.Height, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSCrash(reader io.Reader) (Message, error) { +func DecodeIOSCrash(reader BytesReader) (Message, error) { var err error = nil msg := &IOSCrash{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Reason, err = ReadString(reader); err != nil { + if msg.Reason, err = reader.ReadString(); err != nil { return nil, err } - if msg.Stacktrace, err = ReadString(reader); err != nil { + if msg.Stacktrace, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSScreenEnter(reader io.Reader) (Message, error) { +func DecodeIOSScreenEnter(reader BytesReader) (Message, error) { var err error = nil msg := &IOSScreenEnter{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Title, err = ReadString(reader); err != nil { + if msg.Title, err = reader.ReadString(); err != nil { return nil, err } - if msg.ViewName, err = ReadString(reader); err != nil { + if msg.ViewName, err = reader.ReadString(); err != nil { return nil, err } return msg, err 
} -func DecodeIOSScreenLeave(reader io.Reader) (Message, error) { +func DecodeIOSScreenLeave(reader BytesReader) (Message, error) { var err error = nil msg := &IOSScreenLeave{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Title, err = ReadString(reader); err != nil { + if msg.Title, err = reader.ReadString(); err != nil { return nil, err } - if msg.ViewName, err = ReadString(reader); err != nil { + if msg.ViewName, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSClickEvent(reader io.Reader) (Message, error) { +func DecodeIOSClickEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IOSClickEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Label, err = ReadString(reader); err != nil { + if msg.Label, err = reader.ReadString(); err != nil { return nil, err } - if msg.X, err = ReadUint(reader); err != nil { + if msg.X, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Y, err = ReadUint(reader); err != nil { + if msg.Y, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSInputEvent(reader io.Reader) (Message, error) { +func DecodeIOSInputEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IOSInputEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Value, err = 
ReadString(reader); err != nil { + if msg.Value, err = reader.ReadString(); err != nil { return nil, err } - if msg.ValueMasked, err = ReadBoolean(reader); err != nil { + if msg.ValueMasked, err = reader.ReadBoolean(); err != nil { return nil, err } - if msg.Label, err = ReadString(reader); err != nil { + if msg.Label, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSPerformanceEvent(reader io.Reader) (Message, error) { +func DecodeIOSPerformanceEvent(reader BytesReader) (Message, error) { var err error = nil msg := &IOSPerformanceEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Name, err = ReadString(reader); err != nil { + if msg.Name, err = reader.ReadString(); err != nil { return nil, err } - if msg.Value, err = ReadUint(reader); err != nil { + if msg.Value, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSLog(reader io.Reader) (Message, error) { +func DecodeIOSLog(reader BytesReader) (Message, error) { var err error = nil msg := &IOSLog{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Severity, err = ReadString(reader); err != nil { + if msg.Severity, err = reader.ReadString(); err != nil { return nil, err } - if msg.Content, err = ReadString(reader); err != nil { + if msg.Content, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSInternalError(reader io.Reader) (Message, error) { +func DecodeIOSInternalError(reader BytesReader) (Message, error) { var err error = nil msg := &IOSInternalError{} - if 
msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Content, err = ReadString(reader); err != nil { + if msg.Content, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func DecodeIOSNetworkCall(reader io.Reader) (Message, error) { +func DecodeIOSNetworkCall(reader BytesReader) (Message, error) { var err error = nil msg := &IOSNetworkCall{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Length, err = ReadUint(reader); err != nil { + if msg.Length, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Duration, err = ReadUint(reader); err != nil { + if msg.Duration, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Headers, err = ReadString(reader); err != nil { + if msg.Headers, err = reader.ReadString(); err != nil { return nil, err } - if msg.Body, err = ReadString(reader); err != nil { + if msg.Body, err = reader.ReadString(); err != nil { return nil, err } - if msg.URL, err = ReadString(reader); err != nil { + if msg.URL, err = reader.ReadString(); err != nil { return nil, err } - if msg.Success, err = ReadBoolean(reader); err != nil { + if msg.Success, err = reader.ReadBoolean(); err != nil { return nil, err } - if msg.Method, err = ReadString(reader); err != nil { + if msg.Method, err = reader.ReadString(); err != nil { return nil, err } - if msg.Status, err = ReadUint(reader); err != nil { + if msg.Status, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSPerformanceAggregated(reader io.Reader) (Message, error) { +func DecodeIOSPerformanceAggregated(reader BytesReader) (Message, error) { var err error = nil msg := &IOSPerformanceAggregated{} - if msg.TimestampStart, 
err = ReadUint(reader); err != nil { + if msg.TimestampStart, err = reader.ReadUint(); err != nil { return nil, err } - if msg.TimestampEnd, err = ReadUint(reader); err != nil { + if msg.TimestampEnd, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinFPS, err = ReadUint(reader); err != nil { + if msg.MinFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgFPS, err = ReadUint(reader); err != nil { + if msg.AvgFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxFPS, err = ReadUint(reader); err != nil { + if msg.MaxFPS, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinCPU, err = ReadUint(reader); err != nil { + if msg.MinCPU, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgCPU, err = ReadUint(reader); err != nil { + if msg.AvgCPU, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxCPU, err = ReadUint(reader); err != nil { + if msg.MaxCPU, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinMemory, err = ReadUint(reader); err != nil { + if msg.MinMemory, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgMemory, err = ReadUint(reader); err != nil { + if msg.AvgMemory, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxMemory, err = ReadUint(reader); err != nil { + if msg.MaxMemory, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MinBattery, err = ReadUint(reader); err != nil { + if msg.MinBattery, err = reader.ReadUint(); err != nil { return nil, err } - if msg.AvgBattery, err = ReadUint(reader); err != nil { + if msg.AvgBattery, err = reader.ReadUint(); err != nil { return nil, err } - if msg.MaxBattery, err = ReadUint(reader); err != nil { + if msg.MaxBattery, err = reader.ReadUint(); err != nil { return nil, err } return msg, err } -func DecodeIOSIssueEvent(reader io.Reader) (Message, error) { +func DecodeIOSIssueEvent(reader BytesReader) (Message, error) { var err error = nil msg := 
&IOSIssueEvent{} - if msg.Timestamp, err = ReadUint(reader); err != nil { + if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } - if msg.Type, err = ReadString(reader); err != nil { + if msg.Type, err = reader.ReadString(); err != nil { return nil, err } - if msg.ContextString, err = ReadString(reader); err != nil { + if msg.ContextString, err = reader.ReadString(); err != nil { return nil, err } - if msg.Context, err = ReadString(reader); err != nil { + if msg.Context, err = reader.ReadString(); err != nil { return nil, err } - if msg.Payload, err = ReadString(reader); err != nil { + if msg.Payload, err = reader.ReadString(); err != nil { return nil, err } return msg, err } -func ReadMessage(t uint64, reader io.Reader) (Message, error) { +func ReadMessage(t uint64, reader BytesReader) (Message, error) { switch t { - case 80: return DecodeBatchMeta(reader) - case 81: return DecodeBatchMetadata(reader) - case 82: return DecodePartitionedMessage(reader) - case 0: return DecodeTimestamp(reader) - case 1: return DecodeSessionStart(reader) - case 3: return DecodeSessionEndDeprecated(reader) - case 4: return DecodeSetPageLocation(reader) - case 5: return DecodeSetViewportSize(reader) - case 6: return DecodeSetViewportScroll(reader) - case 7: return DecodeCreateDocument(reader) - case 8: return DecodeCreateElementNode(reader) - case 9: return DecodeCreateTextNode(reader) - case 10: return DecodeMoveNode(reader) - case 11: return DecodeRemoveNode(reader) - case 12: return DecodeSetNodeAttribute(reader) - case 13: return DecodeRemoveNodeAttribute(reader) - case 14: return DecodeSetNodeData(reader) - case 15: return DecodeSetCSSData(reader) - case 16: return DecodeSetNodeScroll(reader) - case 17: return DecodeSetInputTarget(reader) - case 18: return DecodeSetInputValue(reader) - case 19: return DecodeSetInputChecked(reader) - case 20: return DecodeMouseMove(reader) - + case 21: + return DecodeNetworkRequest(reader) case 22: return 
DecodeConsoleLog(reader) - case 23: return DecodePageLoadTiming(reader) - case 24: return DecodePageRenderTiming(reader) - case 25: return DecodeJSExceptionDeprecated(reader) - case 26: return DecodeIntegrationEvent(reader) - case 27: - return DecodeRawCustomEvent(reader) - + return DecodeCustomEvent(reader) case 28: return DecodeUserID(reader) - case 29: return DecodeUserAnonymousID(reader) - case 30: return DecodeMetadata(reader) - case 31: return DecodePageEvent(reader) - case 32: return DecodeInputEvent(reader) - case 33: return DecodeClickEvent(reader) - case 35: return DecodeResourceEvent(reader) - - case 36: - return DecodeCustomEvent(reader) - case 37: return DecodeCSSInsertRule(reader) - case 38: return DecodeCSSDeleteRule(reader) - case 39: return DecodeFetch(reader) - case 40: return DecodeProfiler(reader) - case 41: return DecodeOTable(reader) - case 42: return DecodeStateAction(reader) - - case 43: - return DecodeStateActionEvent(reader) - case 44: return DecodeRedux(reader) - case 45: return DecodeVuex(reader) - case 46: return DecodeMobX(reader) - case 47: return DecodeNgRx(reader) - case 48: return DecodeGraphQL(reader) - case 49: return DecodePerformanceTrack(reader) - - case 50: - return DecodeGraphQLEvent(reader) - - case 51: - return DecodeFetchEvent(reader) - case 52: return DecodeDOMDrop(reader) - case 53: return DecodeResourceTiming(reader) - case 54: return DecodeConnectionInformation(reader) - case 55: return DecodeSetPageVisibility(reader) - case 56: return DecodePerformanceTrackAggr(reader) - case 57: return DecodeLoadFontFace(reader) - case 58: return DecodeSetNodeFocus(reader) - case 59: return DecodeLongTask(reader) - case 60: return DecodeSetNodeAttributeURLBased(reader) - case 61: return DecodeSetCSSDataURLBased(reader) - case 62: return DecodeIssueEvent(reader) - case 63: return DecodeTechnicalInfo(reader) - case 64: return DecodeCustomIssue(reader) - case 66: return DecodeAssetCache(reader) - case 67: return 
DecodeCSSInsertRuleURLBased(reader) - case 69: return DecodeMouseClick(reader) - case 70: return DecodeCreateIFrameDocument(reader) - case 71: return DecodeAdoptedSSReplaceURLBased(reader) - case 72: return DecodeAdoptedSSReplace(reader) - case 73: return DecodeAdoptedSSInsertRuleURLBased(reader) - case 74: return DecodeAdoptedSSInsertRule(reader) - case 75: return DecodeAdoptedSSDeleteRule(reader) - case 76: return DecodeAdoptedSSAddOwner(reader) - case 77: return DecodeAdoptedSSRemoveOwner(reader) - case 79: return DecodeZustand(reader) - case 78: return DecodeJSException(reader) - case 126: return DecodeSessionEnd(reader) - case 127: return DecodeSessionSearch(reader) - case 107: return DecodeIOSBatchMeta(reader) - case 90: return DecodeIOSSessionStart(reader) - case 91: return DecodeIOSSessionEnd(reader) - case 92: return DecodeIOSMetadata(reader) - case 93: return DecodeIOSCustomEvent(reader) - case 94: return DecodeIOSUserID(reader) - case 95: return DecodeIOSUserAnonymousID(reader) - case 96: return DecodeIOSScreenChanges(reader) - case 97: return DecodeIOSCrash(reader) - case 98: return DecodeIOSScreenEnter(reader) - case 99: return DecodeIOSScreenLeave(reader) - case 100: return DecodeIOSClickEvent(reader) - case 101: return DecodeIOSInputEvent(reader) - case 102: return DecodeIOSPerformanceEvent(reader) - case 103: return DecodeIOSLog(reader) - case 104: return DecodeIOSInternalError(reader) - case 105: return DecodeIOSNetworkCall(reader) - case 110: return DecodeIOSPerformanceAggregated(reader) - case 111: return DecodeIOSIssueEvent(reader) - } return nil, fmt.Errorf("Unknown message code: %v", t) } diff --git a/backend/pkg/messages/reader.go b/backend/pkg/messages/reader.go new file mode 100644 index 000000000..1e9fa42db --- /dev/null +++ b/backend/pkg/messages/reader.go @@ -0,0 +1,166 @@ +package messages + +import ( + "fmt" + "io" +) + +type MessageReader interface { + Parse() (err error) + Next() bool + Message() Message +} + +func 
NewMessageReader(data []byte) MessageReader { + return &messageReaderImpl{ + data: data, + reader: NewBytesReader(data), + list: make([]*MessageMeta, 0, 1024), + } +} + +type MessageMeta struct { + msgType uint64 + msgSize uint64 + msgFrom uint64 +} + +type messageReaderImpl struct { + data []byte + reader BytesReader + msgType uint64 + msgSize uint64 + msgBody []byte + version int + broken bool + message Message + err error + list []*MessageMeta + listPtr int +} + +func (m *messageReaderImpl) Parse() (err error) { + m.listPtr = 0 + m.list = m.list[:0] + m.broken = false + for { + // Try to read and decode message type, message size and check range in + m.msgType, err = m.reader.ReadUint() + if err != nil { + if err != io.EOF { + return fmt.Errorf("read message type err: %s", err) + } + // Reached the end of batch + return nil + } + + // Read message body (and decode if protocol version less than 1) + if m.version > 0 && messageHasSize(m.msgType) { + // Read message size if it is a new protocol version + m.msgSize, err = m.reader.ReadSize() + if err != nil { + return fmt.Errorf("read message size err: %s", err) + } + + // Try to avoid EOF error + + curr := m.reader.Pointer() + if len(m.data)-int(curr) < int(m.msgSize) { + return fmt.Errorf("can't read message body") + } + + // Dirty hack to avoid extra memory allocation + m.data[curr-1] = uint8(m.msgType) + + // Add message meta to list + m.list = append(m.list, &MessageMeta{ + msgType: m.msgType, + msgSize: m.msgSize + 1, + msgFrom: uint64(curr - 1), + }) + + // Update data pointer + m.reader.SetPointer(curr + int64(m.msgSize)) + } else { + from := m.reader.Pointer() - 1 + msg, err := ReadMessage(m.msgType, m.reader) + if err != nil { + return fmt.Errorf("read message err: %s", err) + } + if m.msgType == MsgBatchMeta || m.msgType == MsgBatchMetadata { + if len(m.list) > 0 { + return fmt.Errorf("batch meta not at the start of batch") + } + switch message := msg.(type) { + case *BatchMetadata: + m.version = 
int(message.Version) + case *BatchMeta: + m.version = 0 + } + if m.version != 1 { + // Unsupported tracker version, reset reader + m.list = m.list[:0] + m.reader.SetPointer(0) + return nil + } + } + + // Add message meta to list + m.list = append(m.list, &MessageMeta{ + msgType: m.msgType, + msgSize: uint64(m.reader.Pointer() - from), + msgFrom: uint64(from), + }) + } + } +} + +func (m *messageReaderImpl) Next() bool { + if m.broken { + return false + } + + // For new version of tracker + if len(m.list) > 0 { + if m.listPtr >= len(m.list) { + return false + } + + meta := m.list[m.listPtr] + m.listPtr++ + m.message = &RawMessage{ + tp: meta.msgType, + data: m.data[meta.msgFrom : meta.msgFrom+meta.msgSize], + broken: &m.broken, + meta: &message{}, + } + return true + } + + // For prev version of tracker + var msg Message + var err error + + // Try to read and decode message type, message size and check range in + m.msgType, err = m.reader.ReadUint() + if err != nil { + if err != io.EOF { + m.err = fmt.Errorf("read message type err: %s", err) + } + // Reached the end of batch + return false + } + + // Read and decode message + msg, err = ReadMessage(m.msgType, m.reader) + if err != nil { + m.err = fmt.Errorf("read message err: %s", err) + return false + } + m.message = msg + return true +} + +func (m *messageReaderImpl) Message() Message { + return m.message +} diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go index d3cd807c6..803fba127 100644 --- a/backend/pkg/monitoring/metrics.go +++ b/backend/pkg/monitoring/metrics.go @@ -38,7 +38,7 @@ func New(name string) *Metrics { // initPrometheusDataExporter allows to use collected metrics in prometheus func (m *Metrics) initPrometheusDataExporter() { config := prometheus.Config{ - DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50}, + DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50, 100, 250, 500, 1000}, } c := controller.New( processor.NewFactory( @@ -76,8 +76,8 @@ Counter 
is a synchronous instrument that measures additive non-decreasing values */ func (m *Metrics) RegisterCounter(name string) (syncfloat64.Counter, error) { - if _, ok := m.counters[name]; ok { - return nil, fmt.Errorf("counter %s already exists", name) + if counter, ok := m.counters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().Counter(name) if err != nil { @@ -100,8 +100,8 @@ for example, the number of: */ func (m *Metrics) RegisterUpDownCounter(name string) (syncfloat64.UpDownCounter, error) { - if _, ok := m.upDownCounters[name]; ok { - return nil, fmt.Errorf("upDownCounter %s already exists", name) + if counter, ok := m.upDownCounters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().UpDownCounter(name) if err != nil { @@ -122,8 +122,8 @@ Histogram is a synchronous instrument that produces a histogram from recorded va */ func (m *Metrics) RegisterHistogram(name string) (syncfloat64.Histogram, error) { - if _, ok := m.histograms[name]; ok { - return nil, fmt.Errorf("histogram %s already exists", name) + if hist, ok := m.histograms[name]; ok { + return hist, nil } hist, err := m.meter.SyncFloat64().Histogram(name) if err != nil { diff --git a/ee/api/app.py b/ee/api/app.py index 346de2599..a1e203005 100644 --- a/ee/api/app.py +++ b/ee/api/app.py @@ -12,9 +12,11 @@ from starlette.responses import StreamingResponse, JSONResponse from chalicelib.core import traces from chalicelib.utils import helper from chalicelib.utils import pg_client +from chalicelib.utils import events_queue from routers import core, core_dynamic, ee, saml from routers.crons import core_crons from routers.crons import core_dynamic_crons +from routers.crons import ee_crons from routers.subs import insights, metrics, v1_api_ee from routers.subs import v1_api @@ -80,9 +82,10 @@ app.queue_system = queue.Queue() async def startup(): logging.info(">>>>> starting up <<<<<") await pg_client.init() + await events_queue.init() app.schedule.start() - for 
job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs: + for job in core_crons.cron_jobs + core_dynamic_crons.cron_jobs + traces.cron_jobs + ee_crons.ee_cron_jobs: app.schedule.add_job(id=job["func"].__name__, **job) ap_logger.info(">Scheduled jobs:") @@ -95,6 +98,7 @@ async def shutdown(): logging.info(">>>>> shutting down <<<<<") app.schedule.shutdown(wait=True) await traces.process_traces_queue() + await events_queue.terminate() await pg_client.terminate() diff --git a/ee/api/chalicelib/core/signals.py b/ee/api/chalicelib/core/signals.py new file mode 100644 index 000000000..72822d0af --- /dev/null +++ b/ee/api/chalicelib/core/signals.py @@ -0,0 +1,12 @@ +import schemas_ee +import logging +from chalicelib.utils import events_queue + + +def handle_frontend_signals_queued(project_id: int, user_id: int, data: schemas_ee.SignalsSchema): + try: + events_queue.global_queue.put((project_id, user_id, data)) + return {'data': 'insertion succeded'} + except Exception as e: + logging.info(f'Error while inserting: {e}') + return {'errors': [e]} diff --git a/ee/api/chalicelib/utils/events_queue.py b/ee/api/chalicelib/utils/events_queue.py new file mode 100644 index 000000000..d95938857 --- /dev/null +++ b/ee/api/chalicelib/utils/events_queue.py @@ -0,0 +1,80 @@ +import json +import queue +import logging + +from chalicelib.utils import pg_client + +global_queue = None + +class EventQueue(): + + def __init__(self, test=False, queue_max_length=100): + self.events = queue.Queue() + self.events.maxsize = queue_max_length + self.test = test + + def flush(self, conn): + events = list() + params = dict() + # while not self.events.empty(): + # project_id, user_id, element = self.events.get() + # events.append("({project_id}, {user_id}, {timestamp}, '{action}', '{source}', '{category}', '{data}')".format( + # project_id=project_id, user_id=user_id, timestamp=element.timestamp, action=element.action, source=element.source, category=element.category, 
data=json.dumps(element.data))) + i = 0 + while not self.events.empty(): + project_id, user_id, element = self.events.get() + params[f'project_id_{i}'] = project_id + params[f'user_id_{i}'] = user_id + for _key, _val in element.dict().items(): + if _key == 'data': + params[f'{_key}_{i}'] = json.dumps(_val) + else: + params[f'{_key}_{i}'] = _val + events.append(f"(%(project_id_{i})s, %(user_id_{i})s, %(timestamp_{i})s, %(action_{i})s, %(source_{i})s, %(category_{i})s, %(data_{i})s::jsonb)") + i += 1 + if i == 0: + return 0 + if self.test: + print(events) + return 1 + conn.execute( + conn.mogrify(f"""INSERT INTO public.frontend_signals (project_id, user_id, timestamp, action, source, category, data) + VALUES {' , '.join(events)}""", params) + ) + return 1 + + def force_flush(self): + if not self.events.empty(): + try: + with pg_client.PostgresClient() as conn: + self.flush(conn) + except Exception as e: + logging.info(f'Error: {e}') + + def put(self, element): + if self.events.full(): + try: + with pg_client.PostgresClient() as conn: + self.flush(conn) + except Exception as e: + logging.info(f'Error: {e}') + self.events.put(element) + self.events.task_done() + +async def init(test=False): + global global_queue + global_queue = EventQueue(test=test) + logging.info("> queue initialized") + +async def terminate(): + global global_queue + if global_queue is not None: + global_queue.force_flush() + logging.info('> queue fulshed') + +# def __process_schema(trace): +# data = trace.dict() +# data["parameters"] = json.dumps(trace.parameters) if trace.parameters is not None and len( +# trace.parameters.keys()) > 0 else None +# data["payload"] = json.dumps(trace.payload) if trace.payload is not None and len(trace.payload.keys()) > 0 else None +# return data diff --git a/ee/api/routers/crons/core_dynamic_crons.py b/ee/api/routers/crons/core_dynamic_crons.py index 1d8320eb7..31a144749 100644 --- a/ee/api/routers/crons/core_dynamic_crons.py +++ 
b/ee/api/routers/crons/core_dynamic_crons.py @@ -23,13 +23,14 @@ def unlock_cron() -> None: cron_jobs = [ - {"func": unlock_cron, "trigger": "cron", "hour": "*"} + {"func": unlock_cron, "trigger": "cron", "hour": "*"}, ] SINGLE_CRONS = [{"func": telemetry_cron, "trigger": "cron", "day_of_week": "*"}, {"func": run_scheduled_jobs, "trigger": "interval", "seconds": 60, "misfire_grace_time": 20}, {"func": weekly_report, "trigger": "cron", "day_of_week": "mon", "hour": 5, - "misfire_grace_time": 60 * 60}] + "misfire_grace_time": 60 * 60} +] if config("LOCAL_CRONS", default=False, cast=bool): cron_jobs += SINGLE_CRONS diff --git a/ee/api/routers/crons/ee_crons.py b/ee/api/routers/crons/ee_crons.py new file mode 100644 index 000000000..f59fc9edb --- /dev/null +++ b/ee/api/routers/crons/ee_crons.py @@ -0,0 +1,10 @@ +from chalicelib.utils import events_queue + + +def pg_events_queue() -> None: + events_queue.global_queue.force_flush() + + +ee_cron_jobs = [ + {"func": pg_events_queue, "trigger": "interval", "seconds": 60*5, "misfire_grace_time": 20}, +] \ No newline at end of file diff --git a/ee/api/routers/ee.py b/ee/api/routers/ee.py index b56ddd370..a8feb6b53 100644 --- a/ee/api/routers/ee.py +++ b/ee/api/routers/ee.py @@ -1,7 +1,7 @@ from typing import Union from chalicelib.core import roles, traces, projects, sourcemaps, assist_records, sessions -from chalicelib.core import unlock +from chalicelib.core import unlock, signals from chalicelib.utils import assist_helper unlock.check() @@ -116,3 +116,13 @@ def delete_record(projectId: int, recordId: int, context: schemas_ee.CurrentCont if "errors" in result: return result return {"data": result} + + +@app.post('/{projectId}/signals', tags=['signals']) +def send_interactions(projectId: int, data: schemas_ee.SignalsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = signals.handle_frontend_signals_queued(project_id=projectId, user_id=context.user_id, data=data) + + if "errors" in data: + 
return data + return {'data': data} \ No newline at end of file diff --git a/ee/api/schemas_ee.py b/ee/api/schemas_ee.py index afa4230b6..43df5a6f9 100644 --- a/ee/api/schemas_ee.py +++ b/ee/api/schemas_ee.py @@ -31,6 +31,14 @@ class RolePayloadSchema(BaseModel): alias_generator = schemas.attribute_to_camel_case +class SignalsSchema(BaseModel): + timestamp: int = Field(...) + action: str = Field(...) + source: str = Field(...) + category: str = Field(...) + data: dict = Field(default={}) + + class CreateMemberSchema(schemas.CreateMemberSchema): roleId: Optional[int] = Field(None) diff --git a/ee/backend/internal/db/datasaver/messages.go b/ee/backend/internal/db/datasaver/messages.go index f28bd3b8f..7f2863d0b 100644 --- a/ee/backend/internal/db/datasaver/messages.go +++ b/ee/backend/internal/db/datasaver/messages.go @@ -58,7 +58,7 @@ func (mi *Saver) InsertMessage(msg Message) error { return mi.pg.InsertWebJSException(m) case *IntegrationEvent: return mi.pg.InsertWebIntegrationEvent(m) - case *FetchEvent: + case *NetworkRequest: session, err := mi.pg.GetSession(sessionID) if err != nil { log.Printf("can't get session info for CH: %s", err) @@ -72,8 +72,8 @@ func (mi *Saver) InsertMessage(msg Message) error { } } } - return mi.pg.InsertWebFetchEvent(sessionID, m) - case *GraphQLEvent: + return mi.pg.InsertWebNetworkRequest(sessionID, m) + case *GraphQL: session, err := mi.pg.GetSession(sessionID) if err != nil { log.Printf("can't get session info for CH: %s", err) @@ -82,7 +82,7 @@ func (mi *Saver) InsertMessage(msg Message) error { log.Printf("can't insert graphQL event into clickhouse: %s", err) } } - return mi.pg.InsertWebGraphQLEvent(sessionID, m) + return mi.pg.InsertWebGraphQL(sessionID, m) case *SetPageLocation: return mi.pg.InsertSessionReferrer(sessionID, m.Referrer) diff --git a/ee/backend/pkg/db/clickhouse/connector.go b/ee/backend/pkg/db/clickhouse/connector.go index d882f793f..20996c073 100644 --- a/ee/backend/pkg/db/clickhouse/connector.go +++ 
b/ee/backend/pkg/db/clickhouse/connector.go @@ -27,9 +27,9 @@ type Connector interface { InsertWebErrorEvent(session *types.Session, msg *types.ErrorEvent) error InsertWebPerformanceTrackAggr(session *types.Session, msg *messages.PerformanceTrackAggr) error InsertAutocomplete(session *types.Session, msgType, msgValue string) error - InsertRequest(session *types.Session, msg *messages.FetchEvent, savePayload bool) error + InsertRequest(session *types.Session, msg *messages.NetworkRequest, savePayload bool) error InsertCustom(session *types.Session, msg *messages.CustomEvent) error - InsertGraphQL(session *types.Session, msg *messages.GraphQLEvent) error + InsertGraphQL(session *types.Session, msg *messages.GraphQL) error InsertIssue(session *types.Session, msg *messages.IssueEvent) error } @@ -289,7 +289,13 @@ func (c *connectorImpl) InsertWebErrorEvent(session *types.Session, msg *types.E keys = append(keys, k) values = append(values, v) } - + // Check error source before insert to avoid panic from clickhouse lib + switch msg.Source { + case "js_exception", "bugsnag", "cloudwatch", "datadog", "elasticsearch", "newrelic", "rollbar", "sentry", "stackdriver", "sumologic": + default: + return fmt.Errorf("unknown error source: %s", msg.Source) + } + // Insert event to batch if err := c.batches["errors"].Append( session.SessionID, uint16(session.ProjectID), @@ -352,7 +358,7 @@ func (c *connectorImpl) InsertAutocomplete(session *types.Session, msgType, msgV return nil } -func (c *connectorImpl) InsertRequest(session *types.Session, msg *messages.FetchEvent, savePayload bool) error { +func (c *connectorImpl) InsertRequest(session *types.Session, msg *messages.NetworkRequest, savePayload bool) error { urlMethod := url.EnsureMethod(msg.Method) if urlMethod == "" { return fmt.Errorf("can't parse http method. 
sess: %d, method: %s", session.SessionID, msg.Method) @@ -365,8 +371,8 @@ func (c *connectorImpl) InsertRequest(session *types.Session, msg *messages.Fetc if err := c.batches["requests"].Append( session.SessionID, uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), msg.URL, request, response, @@ -386,8 +392,8 @@ func (c *connectorImpl) InsertCustom(session *types.Session, msg *messages.Custo if err := c.batches["custom"].Append( session.SessionID, uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), msg.Name, msg.Payload, "CUSTOM", @@ -398,12 +404,12 @@ func (c *connectorImpl) InsertCustom(session *types.Session, msg *messages.Custo return nil } -func (c *connectorImpl) InsertGraphQL(session *types.Session, msg *messages.GraphQLEvent) error { +func (c *connectorImpl) InsertGraphQL(session *types.Session, msg *messages.GraphQL) error { if err := c.batches["graphql"].Append( session.SessionID, uint16(session.ProjectID), - msg.MessageID, - datetime(msg.Timestamp), + msg.Meta().Index, + datetime(uint64(msg.Meta().Timestamp)), msg.OperationName, nullableString(msg.Variables), nullableString(msg.Response), diff --git a/ee/backend/pkg/failover/failover.go b/ee/backend/pkg/failover/failover.go index 1b9321afc..11ff7e4be 100644 --- a/ee/backend/pkg/failover/failover.go +++ b/ee/backend/pkg/failover/failover.go @@ -91,7 +91,7 @@ func (s *sessionFinderImpl) worker() { func (s *sessionFinderImpl) findSession(sessionID, timestamp, partition uint64) { sessEnd := &messages.SessionEnd{Timestamp: timestamp} sessEnd.SetSessionID(sessionID) - err := s.storage.UploadSessionFiles(sessEnd) + err := s.storage.Upload(sessEnd) if err == nil { log.Printf("found session: %d in partition: %d, original: %d", sessionID, partition, sessionID%numberOfPartitions) diff --git a/ee/backend/pkg/kafka/consumer.go 
b/ee/backend/pkg/kafka/consumer.go index bea1f0604..fc8c98eaa 100644 --- a/ee/backend/pkg/kafka/consumer.go +++ b/ee/backend/pkg/kafka/consumer.go @@ -120,7 +120,6 @@ func (consumer *Consumer) commitAtTimestamps( if err != nil { return err } - logPartitions("Actually assigned:", assigned) var timestamps []kafka.TopicPartition for _, p := range assigned { // p is a copy here since it is not a pointer @@ -142,7 +141,6 @@ func (consumer *Consumer) commitAtTimestamps( if err != nil { return errors.Wrap(err, "Kafka Consumer retrieving committed error") } - logPartitions("Actually committed:", committed) for _, comm := range committed { if comm.Offset == kafka.OffsetStored || comm.Offset == kafka.OffsetInvalid || diff --git a/ee/backend/pkg/kafka/log.go b/ee/backend/pkg/kafka/log.go deleted file mode 100644 index c71c6d2bd..000000000 --- a/ee/backend/pkg/kafka/log.go +++ /dev/null @@ -1,15 +0,0 @@ -package kafka - -import ( - "fmt" - "log" - - "github.com/confluentinc/confluent-kafka-go/kafka" -) - -func logPartitions(s string, prts []kafka.TopicPartition) { - for _, p := range prts { - s = fmt.Sprintf("%v | %v", s, p.Partition) - } - log.Println(s) -} diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py index 41181d924..313fdf12c 100644 --- a/ee/connectors/msgcodec/messages.py +++ b/ee/connectors/msgcodec/messages.py @@ -213,6 +213,20 @@ class MouseMove(Message): self.y = y +class NetworkRequest(Message): + __id__ = 21 + + def __init__(self, type, method, url, request, response, status, timestamp, duration): + self.type = type + self.method = method + self.url = url + self.request = request + self.response = response + self.status = status + self.timestamp = timestamp + self.duration = duration + + class ConsoleLog(Message): __id__ = 22 @@ -265,7 +279,7 @@ class IntegrationEvent(Message): self.payload = payload -class RawCustomEvent(Message): +class CustomEvent(Message): __id__ = 27 def __init__(self, name, payload): @@ -358,16 +372,6 @@ 
class ResourceEvent(Message): self.status = status -class CustomEvent(Message): - __id__ = 36 - - def __init__(self, message_id, timestamp, name, payload): - self.message_id = message_id - self.timestamp = timestamp - self.name = name - self.payload = payload - - class CSSInsertRule(Message): __id__ = 37 @@ -423,15 +427,6 @@ class StateAction(Message): self.type = type -class StateActionEvent(Message): - __id__ = 43 - - def __init__(self, message_id, timestamp, type): - self.message_id = message_id - self.timestamp = timestamp - self.type = type - - class Redux(Message): __id__ = 44 @@ -486,32 +481,6 @@ class PerformanceTrack(Message): self.used_js_heap_size = used_js_heap_size -class GraphQLEvent(Message): - __id__ = 50 - - def __init__(self, message_id, timestamp, operation_kind, operation_name, variables, response): - self.message_id = message_id - self.timestamp = timestamp - self.operation_kind = operation_kind - self.operation_name = operation_name - self.variables = variables - self.response = response - - -class FetchEvent(Message): - __id__ = 51 - - def __init__(self, message_id, timestamp, method, url, request, response, status, duration): - self.message_id = message_id - self.timestamp = timestamp - self.method = method - self.url = url - self.request = request - self.response = response - self.status = status - self.duration = duration - - class DOMDrop(Message): __id__ = 52 diff --git a/ee/connectors/msgcodec/msgcodec.py b/ee/connectors/msgcodec/msgcodec.py index 38bb6d3c2..dc63ffa79 100644 --- a/ee/connectors/msgcodec/msgcodec.py +++ b/ee/connectors/msgcodec/msgcodec.py @@ -237,6 +237,18 @@ class MessageCodec(Codec): y=self.read_uint(reader) ) + if message_id == 21: + return NetworkRequest( + type=self.read_string(reader), + method=self.read_string(reader), + url=self.read_string(reader), + request=self.read_string(reader), + response=self.read_string(reader), + status=self.read_uint(reader), + timestamp=self.read_uint(reader), + 
duration=self.read_uint(reader) + ) + if message_id == 22: return ConsoleLog( level=self.read_string(reader), @@ -280,7 +292,7 @@ class MessageCodec(Codec): ) if message_id == 27: - return RawCustomEvent( + return CustomEvent( name=self.read_string(reader), payload=self.read_string(reader) ) @@ -356,14 +368,6 @@ class MessageCodec(Codec): status=self.read_uint(reader) ) - if message_id == 36: - return CustomEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - name=self.read_string(reader), - payload=self.read_string(reader) - ) - if message_id == 37: return CSSInsertRule( id=self.read_uint(reader), @@ -407,13 +411,6 @@ class MessageCodec(Codec): type=self.read_string(reader) ) - if message_id == 43: - return StateActionEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - type=self.read_string(reader) - ) - if message_id == 44: return Redux( action=self.read_string(reader), @@ -456,28 +453,6 @@ class MessageCodec(Codec): used_js_heap_size=self.read_uint(reader) ) - if message_id == 50: - return GraphQLEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - operation_kind=self.read_string(reader), - operation_name=self.read_string(reader), - variables=self.read_string(reader), - response=self.read_string(reader) - ) - - if message_id == 51: - return FetchEvent( - message_id=self.read_uint(reader), - timestamp=self.read_uint(reader), - method=self.read_string(reader), - url=self.read_string(reader), - request=self.read_string(reader), - response=self.read_string(reader), - status=self.read_uint(reader), - duration=self.read_uint(reader) - ) - if message_id == 52: return DOMDrop( timestamp=self.read_uint(reader) diff --git a/ee/recommendation/Dockerfile b/ee/recommendation/Dockerfile new file mode 100644 index 000000000..992bcf89a --- /dev/null +++ b/ee/recommendation/Dockerfile @@ -0,0 +1,14 @@ +FROM apache/airflow:2.4.3 +COPY requirements.txt . 
+ +USER root +RUN apt-get update \ + && apt-get install -y \ + vim \ + && apt-get install gcc libc-dev g++ -y \ + && apt-get install -y pkg-config libxml2-dev libxmlsec1-dev libxmlsec1-openssl + + +USER airflow +RUN pip install --upgrade pip +RUN pip install -r requirements.txt diff --git a/ee/recommendation/clean.sh b/ee/recommendation/clean.sh new file mode 100644 index 000000000..857c8d63d --- /dev/null +++ b/ee/recommendation/clean.sh @@ -0,0 +1 @@ +docker-compose down --volumes --rmi all diff --git a/ee/recommendation/dags/training_dag.py b/ee/recommendation/dags/training_dag.py new file mode 100644 index 000000000..ff340f772 --- /dev/null +++ b/ee/recommendation/dags/training_dag.py @@ -0,0 +1,46 @@ +from datetime import datetime, timedelta +from textwrap import dedent + +import pendulum + +from airflow import DAG +from airflow.operators.bash import BashOperator +from airflow.operators.python import PythonOperator +import os +_work_dir = os.getcwd() + +def my_function(): + l = os.listdir('scripts') + print(l) + return l + +dag = DAG( + "first_test", + default_args={ + "depends_on_past": True, + "retries": 1, + "retry_delay": timedelta(minutes=3), + }, + start_date=pendulum.datetime(2015, 12, 1, tz="UTC"), + description="My first test", + schedule="@daily", + catchup=False, +) + + +#assigning the task for our dag to do +with dag: + first_world = PythonOperator( + task_id='FirstTest', + python_callable=my_function, + ) + hello_world = BashOperator( + task_id='OneTest', + bash_command=f'python {_work_dir}/scripts/processing.py --batch_size 500', + # provide_context=True + ) + this_world = BashOperator( + task_id='ThisTest', + bash_command=f'python {_work_dir}/scripts/task.py --mode train --kernel linear', + ) + first_world >> hello_world >> this_world diff --git a/ee/recommendation/docker-compose.yaml b/ee/recommendation/docker-compose.yaml new file mode 100644 index 000000000..d7d068551 --- /dev/null +++ b/ee/recommendation/docker-compose.yaml @@ -0,0 +1,285 @@ 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL. +# +# WARNING: This configuration is for local development. Do not use it in a production deployment. +# +# This configuration supports basic configuration using environment variables or an .env file +# The following variables are supported: +# +# AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow. +# Default: apache/airflow:2.4.3 +# AIRFLOW_UID - User ID in Airflow containers +# Default: 50000 +# Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode +# +# _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested). +# Default: airflow +# _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested). +# Default: airflow +# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers. +# Default: '' +# +# Feel free to modify this file to suit your needs. +--- +version: '3' +x-airflow-common: + &airflow-common + # In order to add custom dependencies or upgrade provider packages you can use your extended image. 
+ # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml + # and uncomment the "build" line below, Then run `docker-compose build` to build the images. + # image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.4.3} + build: . + environment: + &airflow-common-env + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + # For backward compatibility, with Airflow <2.3 + AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: '' + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' + AIRFLOW__CORE__LOAD_EXAMPLES: 'false' + AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth' + _PIP_ADDITIONAL_REQUIREMENTS: 'argcomplete' + AIRFLOW__CODE_EDITOR__ENABLED: 'true' + AIRFLOW__CODE_EDITOR__GIT_ENABLED: 'false' + AIRFLOW__CODE_EDITOR__STRING_NORMALIZATION: 'true' + AIRFLOW__CODE_EDITOR__MOUNT: '/opt/airflow/dags' + pg_user: "${pg_user}" + pg_password: "${pg_password}" + pg_dbname: "${pg_dbname}" + pg_host: "${pg_host}" + pg_port: "${pg_port}" + PG_TIMEOUT: "${PG_TIMEOUT}" + PG_POOL: "${PG_POOL}" + volumes: + - ./dags:/opt/airflow/dags + - ./logs:/opt/airflow/logs + - ./plugins:/opt/airflow/plugins + - ./scripts:/opt/airflow/scripts + - ./cache:/opt/airflow/cache + user: "${AIRFLOW_UID:-50000}:0" + depends_on: + &airflow-common-depends-on + redis: + condition: service_healthy + postgres: + condition: service_healthy + +services: + postgres: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 5s + retries: 5 + restart: always + + redis: + image: redis:latest 
+ expose: + - 6379 + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 30s + retries: 50 + restart: always + + airflow-webserver: + <<: *airflow-common + command: webserver + ports: + - 8080:8080 + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + interval: 10s + timeout: 10s + retries: 5 + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-scheduler: + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"'] + interval: 10s + timeout: 10s + retries: 5 + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-worker: + <<: *airflow-common + command: celery worker + healthcheck: + test: + - "CMD-SHELL" + - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' + interval: 10s + timeout: 10s + retries: 5 + environment: + <<: *airflow-common-env + # Required to handle warm shutdown of the celery workers properly + # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation + DUMB_INIT_SETSID: "0" + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-triggerer: + <<: *airflow-common + command: triggerer + healthcheck: + test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"'] + interval: 10s + timeout: 10s + retries: 5 + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + + airflow-init: + <<: *airflow-common + entrypoint: /bin/bash + # yamllint disable rule:line-length + command: + - -c + - | + function ver() { + printf "%04d%04d%04d%04d" $${1//./ } + } + register-python-argcomplete airflow >> ~/.bashrc + 
airflow_version=$$(AIRFLOW__LOGGING__LOGGING_LEVEL=INFO && gosu airflow airflow version) + airflow_version_comparable=$$(ver $${airflow_version}) + min_airflow_version=2.2.0 + min_airflow_version_comparable=$$(ver $${min_airflow_version}) + if [[ -z "${AIRFLOW_UID}" ]]; then + echo + echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m" + echo "If you are on Linux, you SHOULD follow the instructions below to set " + echo "AIRFLOW_UID environment variable, otherwise files will be owned by root." + echo "For other operating systems you can get rid of the warning with manually created .env file:" + echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user" + echo + fi + one_meg=1048576 + mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg)) + cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat) + disk_available=$$(df / | tail -1 | awk '{print $$4}') + warning_resources="false" + if (( mem_available < 4000 )) ; then + echo + echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m" + echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))" + echo + warning_resources="true" + fi + if (( cpus_available < 2 )); then + echo + echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m" + echo "At least 2 CPUs recommended. You have $${cpus_available}" + echo + warning_resources="true" + fi + if (( disk_available < one_meg * 10 )); then + echo + echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m" + echo "At least 10 GBs recommended. 
You have $$(numfmt --to iec $$((disk_available * 1024 )))" + echo + warning_resources="true" + fi + if [[ $${warning_resources} == "true" ]]; then + echo + echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m" + echo "Please follow the instructions to increase amount of resources available:" + echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin" + echo + fi + mkdir -p /sources/logs /sources/dags /sources/plugins + chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} + exec /entrypoint airflow version + # yamllint enable rule:line-length + environment: + <<: *airflow-common-env + _AIRFLOW_DB_UPGRADE: 'true' + _AIRFLOW_WWW_USER_CREATE: 'true' + _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} + _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} + _PIP_ADDITIONAL_REQUIREMENTS: '' + user: "0:0" + volumes: + - .:/sources + + airflow-cli: + <<: *airflow-common + profiles: + - debug + environment: + <<: *airflow-common-env + CONNECTION_CHECK_MAX_COUNT: "0" + # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252 + command: + - bash + - -c + - airflow + + # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up + # or by explicitly targeted on the command line e.g. docker-compose up flower. 
+ # See: https://docs.docker.com/compose/profiles/ + flower: + <<: *airflow-common + command: celery flower + profiles: + - flower + ports: + - 5555:5555 + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5555/"] + interval: 10s + timeout: 10s + retries: 5 + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + +volumes: + postgres-db-volume: diff --git a/ee/recommendation/requirements.txt b/ee/recommendation/requirements.txt new file mode 100644 index 000000000..7f0d26c2e --- /dev/null +++ b/ee/recommendation/requirements.txt @@ -0,0 +1,22 @@ +requests==2.28.1 +urllib3==1.26.12 +pyjwt==2.5.0 +psycopg2-binary==2.9.3 + +numpy +threadpoolctl==3.1.0 +joblib==1.2.0 +scipy +scikit-learn +mlflow + +airflow-code-editor + +pydantic[email]==1.10.2 + +clickhouse-driver==0.2.4 +python3-saml==1.14.0 +python-multipart==0.0.5 +python-decouple + +argcomplete diff --git a/ee/recommendation/run.sh b/ee/recommendation/run.sh new file mode 100644 index 000000000..0a703bca4 --- /dev/null +++ b/ee/recommendation/run.sh @@ -0,0 +1,11 @@ +echo 'Setting up required modules..' +mkdir scripts +mkdir plugins +mkdir logs +mkdir scripts/utils +cp ../../api/chalicelib/utils/pg_client.py scripts/utils +cp ../api/chalicelib/utils/ch_client.py scripts/utils +echo 'Building containers...' +docker-compose up airflow-init +echo 'Running containers...' 
+docker-compose up diff --git a/ee/recommendation/scripts/core/features.py b/ee/recommendation/scripts/core/features.py new file mode 100644 index 000000000..c2e21535e --- /dev/null +++ b/ee/recommendation/scripts/core/features.py @@ -0,0 +1,161 @@ +from utils.ch_client import ClickHouseClient +from utils.pg_client import PostgresClient + +def get_features_clickhouse(**kwargs): + """Gets features from ClickHouse database""" + if 'limit' in kwargs: + limit = kwargs['limit'] + else: + limit = 500 + query = f"""SELECT session_id, project_id, user_id, events_count, errors_count, duration, country, issue_score, device_type, rage, jsexception, badrequest FROM ( + SELECT session_id, project_id, user_id, events_count, errors_count, duration, toInt8(user_country) as country, issue_score, toInt8(user_device_type) as device_type FROM experimental.sessions WHERE user_id IS NOT NULL) as T1 +INNER JOIN (SELECT session_id, project_id, sum(issue_type = 'click_rage') as rage, sum(issue_type = 'js_exception') as jsexception, sum(issue_type = 'bad_request') as badrequest FROM experimental.events WHERE event_type = 'ISSUE' AND session_id > 0 GROUP BY session_id, project_id LIMIT {limit}) as T2 +ON T1.session_id = T2.session_id AND T1.project_id = T2.project_id;""" + with ClickHouseClient() as conn: + res = conn.execute(query) + return res + + +def get_features_postgres(**kwargs): + with PostgresClient() as conn: + funnels = query_funnels(conn, **kwargs) + metrics = query_metrics(conn, **kwargs) + filters = query_with_filters(conn, **kwargs) + #clean_filters(funnels) + #clean_filters(filters) + return clean_filters_split(funnels, isfunnel=True), metrics, clean_filters_split(filters) + + + +def query_funnels(conn, **kwargs): + """Gets Funnels (PG database)""" + # If public.funnel is empty + funnels_query = f"""SELECT project_id, user_id, filter FROM (SELECT project_id, user_id, metric_id FROM public.metrics WHERE metric_type='funnel' + ) as T1 LEFT JOIN (SELECT filter, metric_id FROM 
public.metric_series) as T2 ON T1.metric_id = T2.metric_id""" + # Else + # funnels_query = "SELECT project_id, user_id, filter FROM public.funnels" + + conn.execute(funnels_query) + res = conn.fetchall() + return res + + +def query_metrics(conn, **kwargs): + """Gets Metrics (PG_database)""" + metrics_query = """SELECT metric_type, metric_of, metric_value, metric_format FROM public.metrics""" + conn.execute(metrics_query) + res = conn.fetchall() + return res + + +def query_with_filters(conn, **kwargs): + """Gets Metrics with filters (PG database)""" + filters_query = """SELECT T1.metric_id as metric_id, project_id, name, metric_type, metric_of, filter FROM ( + SELECT metric_id, project_id, name, metric_type, metric_of FROM metrics) as T1 INNER JOIN + (SELECT metric_id, filter FROM metric_series WHERE filter != '{}') as T2 ON T1.metric_id = T2.metric_id""" + conn.execute(filters_query) + res = conn.fetchall() + return res + + +def transform_funnel(project_id, user_id, data): + res = list() + for k in range(len(data)): + _tmp = data[k] + if _tmp['project_id'] != project_id or _tmp['user_id'] != user_id: + continue + else: + _tmp = _tmp['filter']['events'] + res.append(_tmp) + return res + + +def transform_with_filter(data, *kwargs): + res = list() + for k in range(len(data)): + _tmp = data[k] + jump = False + for _key in kwargs.keys(): + if data[_key] != kwargs[_key]: + jump = True + break + if jump: + continue + _type = data['metric_type'] + if _type == 'funnel': + res.append(['funnel', _tmp['filter']['events']]) + elif _type == 'timeseries': + res.append(['timeseries', _tmp['filter']['filters'], _tmp['filter']['events']]) + elif _type == 'table': + res.append(['table', _tmp['metric_of'], _tmp['filter']['events']]) + return res + + +def transform(element): + key_ = element.pop('user_id') + secondary_key_ = element.pop('session_id') + context_ = element.pop('project_id') + features_ = element + del element + return {(key_, context_): {secondary_key_: 
list(features_.values())}} + + +def get_by_project(data, project_id): + head_ = [list(d.keys())[0][1] for d in data] + index_ = [k for k in range(len(head_)) if head_[k] == project_id] + return [data[k] for k in index_] + + +def get_by_user(data, user_id): + head_ = [list(d.keys())[0][0] for d in data] + index_ = [k for k in range(len(head_)) if head_[k] == user_id] + return [data[k] for k in index_] + + +def clean_filters(data): + for j in range(len(data)): + _filter = data[j]['filter'] + _tmp = list() + for i in range(len(_filter['filters'])): + if 'value' in _filter['filters'][i].keys(): + _tmp.append({'type': _filter['filters'][i]['type'], + 'value': _filter['filters'][i]['value'], + 'operator': _filter['filters'][i]['operator']}) + data[j]['filter'] = _tmp + + +def clean_filters_split(data, isfunnel=False): + _data = list() + for j in range(len(data)): + _filter = data[j]['filter'] + _tmp = list() + for i in range(len(_filter['filters'])): + if 'value' in _filter['filters'][i].keys(): + _type = _filter['filters'][i]['type'] + _value = _filter['filters'][i]['value'] + if isinstance(_value, str): + _value = [_value] + _operator = _filter['filters'][i]['operator'] + if isfunnel: + _data.append({'project_id': data[j]['project_id'], 'user_id': data[j]['user_id'], + 'type': _type, + 'value': _value, + 'operator': _operator + }) + else: + _data.append({'metric_id': data[j]['metric_id'], 'project_id': data[j]['project_id'], + 'name': data[j]['name'], 'metric_type': data[j]['metric_type'], + 'metric_of': data[j]['metric_of'], + 'type': _type, + 'value': _value, + 'operator': _operator + }) + return _data + +def test(): + print('One test') + +if __name__ == '__main__': + print('Just a test') + #data = get_features_clickhouse() + #print('Data length:', len(data)) diff --git a/ee/recommendation/scripts/core/recommendation_model.py b/ee/recommendation/scripts/core/recommendation_model.py new file mode 100644 index 000000000..9dae948a7 --- /dev/null +++ 
b/ee/recommendation/scripts/core/recommendation_model.py @@ -0,0 +1,15 @@ +from sklearn.svm import SVC + +class SVM_recommendation(): + def __init__(self, **params): + f"""{SVC.__doc__}""" + self.svm = SVC(**params) + + def fit(self, X1=None, X2=None): + assert X1 is not None or X2 is not None, 'X1 or X2 must be given' + self.svm.fit(X1) + self.svm.fit(X2) + + + def predict(self, X): + return self.svm.predict(X) diff --git a/ee/recommendation/scripts/model_registry.py b/ee/recommendation/scripts/model_registry.py new file mode 100644 index 000000000..80d6dbde6 --- /dev/null +++ b/ee/recommendation/scripts/model_registry.py @@ -0,0 +1,60 @@ +import mlflow +## +import numpy as np +import pickle + +from sklearn import datasets, linear_model +from sklearn.metrics import mean_squared_error, r2_score + +# source: https://scikit-learn.org/stable/auto_examples/linear_model/plot_ols.html + +# Load the diabetes dataset +diabetes_X, diabetes_y = datasets.load_diabetes(return_X_y=True) + +# Use only one feature +diabetes_X = diabetes_X[:, np.newaxis, 2] + +# Split the data into training/testing sets +diabetes_X_train = diabetes_X[:-20] +diabetes_X_test = diabetes_X[-20:] + +# Split the targets into training/testing sets +diabetes_y_train = diabetes_y[:-20] +diabetes_y_test = diabetes_y[-20:] + + +def print_predictions(m, y_pred): + + # The coefficients + print('Coefficients: \n', m.coef_) + # The mean squared error + print('Mean squared error: %.2f' + % mean_squared_error(diabetes_y_test, y_pred)) + # The coefficient of determination: 1 is perfect prediction + print('Coefficient of determination: %.2f' + % r2_score(diabetes_y_test, y_pred)) + +# Create linear regression object +lr_model = linear_model.LinearRegression() + +# Train the model using the training sets +lr_model.fit(diabetes_X_train, diabetes_y_train) + +# Make predictions using the testing set +diabetes_y_pred = lr_model.predict(diabetes_X_test) +print_predictions(lr_model, diabetes_y_pred) + +# save the model in the
native sklearn format +filename = 'lr_model.pkl' +pickle.dump(lr_model, open(filename, 'wb')) +## +# load the model into memory +loaded_model = pickle.load(open(filename, 'rb')) + +# log and register the model using MLflow scikit-learn API +mlflow.set_tracking_uri("postgresql+psycopg2://airflow:airflow@postgres/mlruns") +reg_model_name = "SklearnLinearRegression" +print("--") +mlflow.sklearn.log_model(loaded_model, "sk_learn", + serialization_format="cloudpickle", + registered_model_name=reg_model_name) diff --git a/ee/recommendation/scripts/processing.py b/ee/recommendation/scripts/processing.py new file mode 100644 index 000000000..8f3631655 --- /dev/null +++ b/ee/recommendation/scripts/processing.py @@ -0,0 +1,42 @@ +import time +import argparse +from core import features +from utils import pg_client +import multiprocessing as mp +from decouple import config +import asyncio +import pandas + + +def features_ch(q): + q.put(features.get_features_clickhouse()) + +def features_pg(q): + q.put(features.get_features_postgres()) + +def get_features(): + #mp.set_start_method('spawn') + #q = mp.Queue() + #p1 = mp.Process(target=features_ch, args=(q,)) + #p1.start() + pg_features = features.get_features_postgres() + ch_features = []#p1.join() + return [pg_features, ch_features] + + +parser = argparse.ArgumentParser(description='Gets and process data from Postgres and ClickHouse.') +parser.add_argument('--batch_size', type=int, required=True, help='--batch_size max size of columns per file to be saved in opt/airflow/cache') + +args = parser.parse_args() + +if __name__ == '__main__': + asyncio.run(pg_client.init()) + print(args) + t1 = time.time() + data = get_features() + #print(data) + cache_dir = config("data_dir", default=f"/opt/airflow/cache") + for d in data[0]: + pandas.DataFrame(d).to_csv(f'{cache_dir}/tmp-{hash(time.time())}', sep=',') + t2 = time.time() + print(f'DONE! 
information retrieved in {t2-t1: .2f} seconds') diff --git a/ee/recommendation/scripts/task.py b/ee/recommendation/scripts/task.py new file mode 100644 index 000000000..b427fa1c5 --- /dev/null +++ b/ee/recommendation/scripts/task.py @@ -0,0 +1,41 @@ +import time +import argparse +from decouple import config +from core import recommendation_model + +import pandas +import json +import os + + +def transform_dict_string(s_dicts): + data = list() + for s_dict in s_dicts: + data.append(json.loads(s_dict.replace("'", '"').replace('None','null').replace('False','false'))) + return data + +def process_file(file_name): + return pandas.read_csv(file_name, sep=",") + + +def read_batches(): + base_dir = config('dir_path', default='/opt/airflow/cache') + files = os.listdir(base_dir) + for file in files: + yield process_file(f'{base_dir}/{file}') + + +parser = argparse.ArgumentParser(description='Handle machine learning inputs.') +parser.add_argument('--mode', choices=['train', 'test'], required=True, help='--mode sets the model in train or test mode') +parser.add_argument('--kernel', default='linear', help='--kernel set the kernel to be used for SVM') + +args = parser.parse_args() + +if __name__ == '__main__': + print(args) + t1 = time.time() + buff = read_batches() + for b in buff: + print(b.head()) + t2 = time.time() + print(f'DONE! 
information retrieved in {t2-t1: .2f} seconds') diff --git a/ee/recommendation/scripts/utils/ch_client.py b/ee/recommendation/scripts/utils/ch_client.py new file mode 100644 index 000000000..514820212 --- /dev/null +++ b/ee/recommendation/scripts/utils/ch_client.py @@ -0,0 +1,54 @@ +import logging + +import clickhouse_driver +from decouple import config + +logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) + +settings = {} +if config('ch_timeout', cast=int, default=-1) > 0: + logging.info(f"CH-max_execution_time set to {config('ch_timeout')}s") + settings = {**settings, "max_execution_time": config('ch_timeout', cast=int)} + +if config('ch_receive_timeout', cast=int, default=-1) > 0: + logging.info(f"CH-receive_timeout set to {config('ch_receive_timeout')}s") + settings = {**settings, "receive_timeout": config('ch_receive_timeout', cast=int)} + + +class ClickHouseClient: + __client = None + + def __init__(self): + self.__client = clickhouse_driver.Client(host=config("ch_host"), + database="default", + port=config("ch_port", cast=int), + settings=settings) \ + if self.__client is None else self.__client + + def __enter__(self): + return self + + def execute(self, query, params=None, **args): + try: + results = self.__client.execute(query=query, params=params, with_column_types=True, **args) + keys = tuple(x for x, y in results[1]) + return [dict(zip(keys, i)) for i in results[0]] + except Exception as err: + logging.error("--------- CH QUERY EXCEPTION -----------") + logging.error(self.format(query=query, params=params)) + logging.error("--------------------") + raise err + + def insert(self, query, params=None, **args): + return self.__client.execute(query=query, params=params, **args) + + def client(self): + return self.__client + + def format(self, query, params): + if params is None: + return query + return self.__client.substitute_params(query, params, self.__client.connection.context) + + def __exit__(self, *args): + pass diff --git 
a/ee/recommendation/scripts/utils/pg_client.py b/ee/recommendation/scripts/utils/pg_client.py new file mode 100644 index 000000000..69a5b5a8b --- /dev/null +++ b/ee/recommendation/scripts/utils/pg_client.py @@ -0,0 +1,166 @@ +import logging +import time +from threading import Semaphore + +import psycopg2 +import psycopg2.extras +from decouple import config +from psycopg2 import pool + +logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) +logging.getLogger('apscheduler').setLevel(config("LOGLEVEL", default=logging.INFO)) + +_PG_CONFIG = {"host": config("pg_host"), + "database": config("pg_dbname"), + "user": config("pg_user"), + "password": config("pg_password"), + "port": config("pg_port", cast=int), + "application_name": config("APP_NAME", default="PY")} +PG_CONFIG = dict(_PG_CONFIG) +if config("PG_TIMEOUT", cast=int, default=0) > 0: + PG_CONFIG["options"] = f"-c statement_timeout={config('PG_TIMEOUT', cast=int) * 1000}" + + +class ORThreadedConnectionPool(psycopg2.pool.ThreadedConnectionPool): + def __init__(self, minconn, maxconn, *args, **kwargs): + self._semaphore = Semaphore(maxconn) + super().__init__(minconn, maxconn, *args, **kwargs) + + def getconn(self, *args, **kwargs): + self._semaphore.acquire() + try: + return super().getconn(*args, **kwargs) + except psycopg2.pool.PoolError as e: + if str(e) == "connection pool is closed": + make_pool() + raise e + + def putconn(self, *args, **kwargs): + try: + super().putconn(*args, **kwargs) + self._semaphore.release() + except psycopg2.pool.PoolError as e: + if str(e) == "trying to put unkeyed connection": + print("!!! 
trying to put unkeyed connection") + print(f"env-PG_POOL:{config('PG_POOL', default=None)}") + return + raise e + + +postgreSQL_pool: ORThreadedConnectionPool = None + +RETRY_MAX = config("PG_RETRY_MAX", cast=int, default=50) +RETRY_INTERVAL = config("PG_RETRY_INTERVAL", cast=int, default=2) +RETRY = 0 + + +def make_pool(): + if not config('PG_POOL', cast=bool, default=True): + return + global postgreSQL_pool + global RETRY + if postgreSQL_pool is not None: + try: + postgreSQL_pool.closeall() + except (Exception, psycopg2.DatabaseError) as error: + logging.error("Error while closing all connexions to PostgreSQL", error) + try: + postgreSQL_pool = ORThreadedConnectionPool(config("PG_MINCONN", cast=int, default=20), + config("PG_MAXCONN", cast=int, default=80), + **PG_CONFIG) + if (postgreSQL_pool): + logging.info("Connection pool created successfully") + except (Exception, psycopg2.DatabaseError) as error: + logging.error("Error while connecting to PostgreSQL", error) + if RETRY < RETRY_MAX: + RETRY += 1 + logging.info(f"waiting for {RETRY_INTERVAL}s before retry n°{RETRY}") + time.sleep(RETRY_INTERVAL) + make_pool() + else: + raise error + + +class PostgresClient: + connection = None + cursor = None + long_query = False + unlimited_query = False + + def __init__(self, long_query=False, unlimited_query=False): + self.long_query = long_query + self.unlimited_query = unlimited_query + if unlimited_query: + long_config = dict(_PG_CONFIG) + long_config["application_name"] += "-UNLIMITED" + self.connection = psycopg2.connect(**long_config) + elif long_query: + long_config = dict(_PG_CONFIG) + long_config["application_name"] += "-LONG" + long_config["options"] = f"-c statement_timeout=" \ + f"{config('pg_long_timeout', cast=int, default=5 * 60) * 1000}" + self.connection = psycopg2.connect(**long_config) + elif not config('PG_POOL', cast=bool, default=True): + single_config = dict(_PG_CONFIG) + single_config["application_name"] += "-NOPOOL" + single_config["options"] = 
f"-c statement_timeout={config('PG_TIMEOUT', cast=int, default=30) * 1000}" + self.connection = psycopg2.connect(**single_config) + else: + self.connection = postgreSQL_pool.getconn() + + def __enter__(self): + if self.cursor is None: + self.cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + self.cursor.recreate = self.recreate_cursor + return self.cursor + + def __exit__(self, *args): + try: + self.connection.commit() + self.cursor.close() + if self.long_query or self.unlimited_query: + self.connection.close() + except Exception as error: + logging.error("Error while committing/closing PG-connection", error) + if str(error) == "connection already closed" \ + and not self.long_query \ + and not self.unlimited_query \ + and config('PG_POOL', cast=bool, default=True): + logging.info("Recreating the connexion pool") + make_pool() + else: + raise error + finally: + if config('PG_POOL', cast=bool, default=True) \ + and not self.long_query \ + and not self.unlimited_query: + postgreSQL_pool.putconn(self.connection) + + def recreate_cursor(self, rollback=False): + if rollback: + try: + self.connection.rollback() + except Exception as error: + logging.error("Error while rollbacking connection for recreation", error) + try: + self.cursor.close() + except Exception as error: + logging.error("Error while closing cursor for recreation", error) + self.cursor = None + return self.__enter__() + + +async def init(): + logging.info(f">PG_POOL:{config('PG_POOL', default=None)}") + if config('PG_POOL', cast=bool, default=True): + make_pool() + + +async def terminate(): + global postgreSQL_pool + if postgreSQL_pool is not None: + try: + postgreSQL_pool.closeall() + logging.info("Closed all connexions to PostgreSQL") + except (Exception, psycopg2.DatabaseError) as error: + logging.error("Error while closing all connexions to PostgreSQL", error) diff --git a/ee/recommendation/signals.sql b/ee/recommendation/signals.sql new file mode 100644 index 
000000000..5500969ed --- /dev/null +++ b/ee/recommendation/signals.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS frontend_signals +( + project_id bigint NOT NULL, + user_id text NOT NULL, + timestamp bigint NOT NULL, + action text NOT NULL, + source text NOT NULL, + category text NOT NULL, + data json +); +CREATE INDEX IF NOT EXISTS frontend_signals_user_id_idx ON frontend_signals (user_id); diff --git a/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql new file mode 100644 index 000000000..a34074856 --- /dev/null +++ b/ee/scripts/schema/db/init_dbs/postgresql/1.10.0/1.10.0.sql @@ -0,0 +1,20 @@ +BEGIN; +CREATE OR REPLACE FUNCTION openreplay_version() + RETURNS text AS +$$ +SELECT 'v1.10.0-ee' +$$ LANGUAGE sql IMMUTABLE; + +CREATE TABLE IF NOT EXISTS frontend_signals +( + project_id bigint NOT NULL, + user_id integer NOT NULL references users (user_id) ON DELETE CASCADE, + timestamp bigint NOT NULL, + action text NOT NULL, + source text NOT NULL, + category text NOT NULL, + data jsonb +); +CREATE INDEX IF NOT EXISTS frontend_signals_user_id_idx ON frontend_signals (user_id); + +COMMIT; diff --git a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql index c4ac5980f..44e2bc03a 100644 --- a/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql +++ b/ee/scripts/schema/db/init_dbs/postgresql/init_schema.sql @@ -647,6 +647,19 @@ $$ CREATE INDEX IF NOT EXISTS user_favorite_sessions_user_id_session_id_idx ON user_favorite_sessions (user_id, session_id); + CREATE TABLE IF NOT EXISTS frontend_signals + ( + project_id bigint NOT NULL, + user_id text NOT NULL, + timestamp bigint NOT NULL, + action text NOT NULL, + source text NOT NULL, + category text NOT NULL, + data json + ); + CREATE INDEX IF NOT EXISTS frontend_signals_user_id_idx ON frontend_signals (user_id); + + CREATE TABLE IF NOT EXISTS assigned_sessions ( session_id bigint NOT 
NULL REFERENCES sessions (session_id) ON DELETE CASCADE, diff --git a/frontend/.storybook/config.js b/frontend/.storybook/config.DEPRECATED.js similarity index 90% rename from frontend/.storybook/config.js rename to frontend/.storybook/config.DEPRECATED.js index 1ff1f28d1..fad172b6f 100644 --- a/frontend/.storybook/config.js +++ b/frontend/.storybook/config.DEPRECATED.js @@ -2,13 +2,10 @@ import { configure, addDecorator } from '@storybook/react'; import { Provider } from 'react-redux'; import store from '../app/store'; import { MemoryRouter } from "react-router" -import { PlayerProvider } from '../app/player/store' const withProvider = (story) => ( - { story() } - ) @@ -33,4 +30,4 @@ configure( require.context('../app', true, /\.stories\.js$/), ], module -); \ No newline at end of file +); diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 5e6c9b3b0..ccd4655ee 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -14,9 +14,13 @@ COPY nginx.conf /etc/nginx/conf.d/default.conf # Default step in docker build FROM nginx:alpine LABEL maintainer=Rajesh +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA COPY --from=builder /work/public /var/www/openreplay COPY nginx.conf /etc/nginx/conf.d/default.conf +ENV GIT_SHA=$GIT_SHA + EXPOSE 8080 RUN chown -R nginx:nginx /var/cache/nginx && \ chown -R nginx:nginx /var/log/nginx && \ diff --git a/frontend/app/Router.js b/frontend/app/Router.js index 6a4aea446..e99ee2546 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -6,7 +6,6 @@ import { Notification } from 'UI'; import { Loader } from 'UI'; import { fetchUserInfo } from 'Duck/user'; import withSiteIdUpdater from 'HOCs/withSiteIdUpdater'; -import WidgetViewPure from 'Components/Dashboard/components/WidgetView'; import Header from 'Components/Header/Header'; import { fetchList as fetchSiteList } from 'Duck/site'; import { fetchList as fetchAnnouncements } from 'Duck/announcements'; @@ -37,6 +36,7 @@ const ErrorsPure = lazy(() => 
import('Components/Errors/Errors')); const FunnelDetailsPure = lazy(() => import('Components/Funnels/FunnelDetails')); const FunnelIssueDetails = lazy(() => import('Components/Funnels/FunnelIssueDetails')); const FunnelPagePure = lazy(() => import('Components/Funnels/FunnelPage')); +const MultiviewPure = lazy(() => import('Components/Session_/Multiview/Multiview.tsx')); const BugFinder = withSiteIdUpdater(BugFinderPure); const Dashboard = withSiteIdUpdater(DashboardPure); @@ -49,6 +49,7 @@ const Errors = withSiteIdUpdater(ErrorsPure); const FunnelPage = withSiteIdUpdater(FunnelPagePure); const FunnelsDetails = withSiteIdUpdater(FunnelDetailsPure); const FunnelIssue = withSiteIdUpdater(FunnelIssueDetails); +const Multiview = withSiteIdUpdater(MultiviewPure) const withSiteId = routes.withSiteId; const METRICS_PATH = routes.metrics(); @@ -67,6 +68,7 @@ const DASHBOARD_METRIC_DETAILS_PATH = routes.dashboardMetricDetails(); // const WIDGET_PATAH = routes.dashboardMetric(); const SESSIONS_PATH = routes.sessions(); const ASSIST_PATH = routes.assist(); +const RECORDINGS_PATH = routes.recordings(); const ERRORS_PATH = routes.errors(); const ERROR_PATH = routes.error(); const FUNNEL_PATH = routes.funnels(); @@ -80,6 +82,8 @@ const FORGOT_PASSWORD = routes.forgotPassword(); const CLIENT_PATH = routes.client(); const ONBOARDING_PATH = routes.onboarding(); const ONBOARDING_REDIRECT_PATH = routes.onboarding(OB_DEFAULT_TAB); +const MULTIVIEW_PATH = routes.multiview(); +const MULTIVIEW_INDEX_PATH = routes.multiviewIndex(); @withStore @withRouter @@ -170,8 +174,14 @@ class Router extends React.Component { render() { const { isLoggedIn, jwt, siteId, sites, loading, changePassword, location, existingTenant, onboarding, isEnterprise } = this.props; const siteIdList = sites.map(({ id }) => id).toJS(); - const hideHeader = (location.pathname && location.pathname.includes('/session/')) || location.pathname.includes('/assist/'); - const isPlayer = isRoute(SESSION_PATH, location.pathname) 
|| isRoute(LIVE_SESSION_PATH, location.pathname); + const hideHeader = (location.pathname && location.pathname.includes('/session/')) + || location.pathname.includes('/assist/') + || location.pathname.includes('multiview'); + const isPlayer = isRoute(SESSION_PATH, location.pathname) + || isRoute(LIVE_SESSION_PATH, location.pathname) + || isRoute(MULTIVIEW_PATH, location.pathname) + || isRoute(MULTIVIEW_INDEX_PATH, location.pathname); + const redirectToOnboarding = !onboarding && localStorage.getItem(GLOBAL_HAS_NO_RECORDINGS) === 'true' return isLoggedIn ? ( @@ -194,6 +204,12 @@ class Router extends React.Component { state: tenantId, }); break; + case '/integrations/msteams': + client.post('integrations/msteams/add', { + code: location.search.split('=')[1], + state: tenantId, + }); + break; } return ; }} @@ -212,7 +228,10 @@ class Router extends React.Component { + + + diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index 0e4699359..b60c0dbb7 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -11,6 +11,7 @@ const siteIdRequiredPaths = [ '/metadata', '/integrations/sentry/events', '/integrations/slack/notify', + '/integrations/msteams/notify', '/assignments', '/integration/sources', '/issue_types', @@ -94,7 +95,16 @@ export default class APIClient { ) { edp = `${ edp }/${ this.siteId }` } - return fetch(edp + path, this.init); + return fetch(edp + path, this.init) + .then(response => { + if (response.ok) { + return response + } else { + throw new Error( + `! 
${this.init.method} error on ${path}; ${response.status}` + ) + } + }) } get(path, params, options) { diff --git a/frontend/app/assets/integrations/teams.svg b/frontend/app/assets/integrations/teams.svg new file mode 100644 index 000000000..e93adb2b8 --- /dev/null +++ b/frontend/app/assets/integrations/teams.svg @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/app/components/Alerts/AlertForm.js b/frontend/app/components/Alerts/AlertForm.js index 6604574e0..2d9f26027 100644 --- a/frontend/app/components/Alerts/AlertForm.js +++ b/frontend/app/components/Alerts/AlertForm.js @@ -1,6 +1,5 @@ import React, { useEffect } from 'react'; -import { Button, Form, Input, SegmentSelection, Checkbox, Message, Link, Icon } from 'UI'; -import { alertMetrics as metrics } from 'App/constants'; +import { Button, Form, Input, SegmentSelection, Checkbox, Icon } from 'UI'; import { alertConditions as conditions } from 'App/constants'; import { client, CLIENT_TABS } from 'App/routes'; import { connect } from 'react-redux'; @@ -12,319 +11,381 @@ import { fetchTriggerOptions } from 'Duck/alerts'; import Select from 'Shared/Select'; const thresholdOptions = [ - { label: '15 minutes', value: 15 }, - { label: '30 minutes', value: 30 }, - { label: '1 hour', value: 60 }, - { label: '2 hours', value: 120 }, - { label: '4 hours', value: 240 }, - { label: '1 day', value: 1440 }, + { label: '15 minutes', value: 15 }, + { label: '30 minutes', value: 30 }, + { label: '1 hour', value: 60 }, + { label: '2 hours', value: 120 }, + { label: '4 hours', value: 240 }, + { label: '1 day', value: 1440 }, ]; const changeOptions = [ - { label: 'change', value: 'change' }, - { label: '% change', value: 'percent' }, + { label: 'change', value: 'change' }, + { label: '% change', value: 'percent' }, ]; -const Circle = ({ text }) =>
{text}
; +const Circle = ({ text }) => ( +
+ {text} +
+); const Section = ({ index, title, description, content }) => ( -
-
- -
- {title} - {description &&
{description}
} -
-
- -
{content}
+
+
+ +
+ {title} + {description &&
{description}
} +
+ +
{content}
+
); const integrationsRoute = client(CLIENT_TABS.INTEGRATIONS); const AlertForm = (props) => { - const { - instance, - slackChannels, - webhooks, - loading, - onDelete, - deleting, - triggerOptions, - metricId, - style = { width: '580px', height: '100vh' }, - } = props; - const write = ({ target: { value, name } }) => props.edit({ [name]: value }); - const writeOption = (e, { name, value }) => props.edit({ [name]: value.value }); - const onChangeCheck = ({ target: { checked, name } }) => props.edit({ [name]: checked }); - // const onChangeOption = ({ checked, name }) => props.edit({ [ name ]: checked }) - // const onChangeCheck = (e) => { console.log(e) } + const { + instance, + slackChannels, + msTeamsChannels, + webhooks, + loading, + onDelete, + deleting, + triggerOptions, + style = { width: '580px', height: '100vh' }, + } = props; + const write = ({ target: { value, name } }) => props.edit({ [name]: value }); + const writeOption = (e, { name, value }) => props.edit({ [name]: value.value }); + const onChangeCheck = ({ target: { checked, name } }) => props.edit({ [name]: checked }); + // const onChangeOption = ({ checked, name }) => props.edit({ [ name ]: checked }) + // const onChangeCheck = (e) => { console.log(e) } - useEffect(() => { - props.fetchTriggerOptions(); - }, []); + useEffect(() => { + props.fetchTriggerOptions(); + }, []); - const writeQueryOption = (e, { name, value }) => { - const { query } = instance; - props.edit({ query: { ...query, [name]: value } }); - }; + const writeQueryOption = (e, { name, value }) => { + const { query } = instance; + props.edit({ query: { ...query, [name]: value } }); + }; - const writeQuery = ({ target: { value, name } }) => { - const { query } = instance; - props.edit({ query: { ...query, [name]: value } }); - }; + const writeQuery = ({ target: { value, name } }) => { + const { query } = instance; + props.edit({ query: { ...query, [name]: value } }); + }; - const metric = instance && instance.query.left ? 
triggerOptions.find((i) => i.value === instance.query.left) : null; - const unit = metric ? metric.unit : ''; - const isThreshold = instance.detectionMethod === 'threshold'; + const metric = + instance && instance.query.left + ? triggerOptions.find((i) => i.value === instance.query.left) + : null; + const unit = metric ? metric.unit : ''; + const isThreshold = instance.detectionMethod === 'threshold'; - return ( -
props.onSubmit(instance)} id="alert-form"> -
- -
-
- props.edit({ [name]: value })} - value={{ value: instance.detectionMethod }} - list={[ - { name: 'Threshold', value: 'threshold' }, - { name: 'Change', value: 'change' }, - ]} - /> -
- {isThreshold && 'Eg. Alert me if memory.avg is greater than 500mb over the past 4 hours.'} - {!isThreshold && - 'Eg. Alert me if % change of memory.avg is greater than 10% over the past 4 hours compared to the previous 4 hours.'} -
-
-
- } - /> - -
- -
- {!isThreshold && ( -
- - i.value === instance.query.left)} - // onChange={ writeQueryOption } - onChange={({ value }) => writeQueryOption(null, { name: 'left', value: value.value })} - /> -
- -
- -
- - {'test'} - - )} - {!unit && ( - - )} -
-
- -
- - writeOption(null, { name: 'previousPeriod', value })} - /> -
- )} -
- } - /> - -
- -
-
- - - -
- - {instance.slack && ( -
- -
- props.edit({ slackInput: selected })} - /> -
-
- )} - - {instance.email && ( -
- -
- props.edit({ emailInput: selected })} - /> -
-
- )} - - {instance.webhook && ( -
- - props.edit({ webhookInput: selected })} - /> -
- )} -
- } - /> + return ( + props.onSubmit(instance)} + id="alert-form" + > +
+ +
+
+ props.edit({ [name]: value })} + value={{ value: instance.detectionMethod }} + list={[ + { name: 'Threshold', value: 'threshold' }, + { name: 'Change', value: 'change' }, + ]} + /> +
+ {isThreshold && + 'Eg. Alert me if memory.avg is greater than 500mb over the past 4 hours.'} + {!isThreshold && + 'Eg. Alert me if % change of memory.avg is greater than 10% over the past 4 hours compared to the previous 4 hours.'} +
+
+ } + /> -
-
- -
- +
+ +
+ {!isThreshold && ( +
+ + i.value === instance.query.left)} + // onChange={ writeQueryOption } + onChange={({ value }) => + writeQueryOption(null, { name: 'left', value: value.value }) + } + /> +
+ +
+ +
+ + {'test'} + + )} + {!unit && ( + + )}
+
+ +
+ + writeOption(null, { name: 'previousPeriod', value })} + /> +
+ )}
- - ); + } + /> + +
+ +
+
+ + + + +
+ + {instance.slack && ( +
+ +
+ props.edit({ slackInput: selected })} + /> +
+
+ )} + {instance.msteams && ( +
+ +
+ props.edit({ msteamsInput: selected })} + /> +
+
+ )} + + {instance.email && ( +
+ +
+ props.edit({ emailInput: selected })} + /> +
+
+ )} + + {instance.webhook && ( +
+ + props.edit({ webhookInput: selected })} + /> +
+ )} +
+ } + /> +
+ +
+
+ +
+ +
+
+ {instance.exists() && ( + + )} +
+
+ + ); }; export default connect( - (state) => ({ - instance: state.getIn(['alerts', 'instance']), - triggerOptions: state.getIn(['alerts', 'triggerOptions']), - loading: state.getIn(['alerts', 'saveRequest', 'loading']), - deleting: state.getIn(['alerts', 'removeRequest', 'loading']), - }), - { fetchTriggerOptions } + (state) => ({ + instance: state.getIn(['alerts', 'instance']), + triggerOptions: state.getIn(['alerts', 'triggerOptions']), + loading: state.getIn(['alerts', 'saveRequest', 'loading']), + deleting: state.getIn(['alerts', 'removeRequest', 'loading']), + }), + { fetchTriggerOptions } )(AlertForm); diff --git a/frontend/app/components/Alerts/AlertItem.js b/frontend/app/components/Alerts/AlertItem.js index 9dbb204b8..76431bf77 100644 --- a/frontend/app/components/Alerts/AlertItem.js +++ b/frontend/app/components/Alerts/AlertItem.js @@ -17,6 +17,8 @@ const AlertItem = props => { const getNotifyChannel = alert => { let str = ''; + if (alert.msteams) + str = 'MS Teams' if (alert.slack) str = 'Slack'; if (alert.email) @@ -36,7 +38,7 @@ const AlertItem = props => { className={cn(stl.wrapper, 'p-4 py-6 relative group cursor-pointer', { [stl.active]: active })} onClick={onEdit} id="alert-item" - > + >
{alert.name}
diff --git a/frontend/app/components/Assist/Assist.tsx b/frontend/app/components/Assist/Assist.tsx index 3ef99c573..d46dcbdd0 100644 --- a/frontend/app/components/Assist/Assist.tsx +++ b/frontend/app/components/Assist/Assist.tsx @@ -1,26 +1,67 @@ import React from 'react'; -import LiveSessionList from 'Shared/LiveSessionList'; -import LiveSessionSearch from 'Shared/LiveSessionSearch'; -import cn from 'classnames' +import { withRouter, RouteComponentProps } from 'react-router-dom'; import withPageTitle from 'HOCs/withPageTitle'; -import withPermissions from 'HOCs/withPermissions' -// import SessionSearch from '../shared/SessionSearch'; -// import MainSearchBar from '../shared/MainSearchBar'; -import AssistSearchField from './AssistSearchField'; +import withPermissions from 'HOCs/withPermissions'; +import AssistRouter from './AssistRouter'; +import { SideMenuitem } from 'UI'; +import { withSiteId, assist, recordings } from 'App/routes'; +import { connect } from 'react-redux'; -function Assist() { - return ( -
+interface Props extends RouteComponentProps { + siteId: string; + history: any; + isEnterprise: boolean; +} + +function Assist(props: Props) { + const { history, siteId, isEnterprise } = props; + const isAssist = history.location.pathname.includes('assist'); + const isRecords = history.location.pathname.includes('recordings'); + + const redirect = (path: string) => { + history.push(withSiteId(path, siteId)); + }; + if (isEnterprise) { + return ( +
-
- - -
- +
+ redirect(assist())} + /> + redirect(recordings())} + /> +
+
+
+
+ ); + } + + return ( +
+
) } -export default withPageTitle("Assist - OpenReplay")(withPermissions(['ASSIST_LIVE'])(Assist)); +const Cont = connect((state: any) => ({ + isEnterprise: + state.getIn(['user', 'account', 'edition']) === 'ee' || + state.getIn(['user', 'authDetails', 'edition']) === 'ee', +}))(Assist); + +export default withPageTitle('Assist - OpenReplay')( + withPermissions(['ASSIST_LIVE'])(withRouter(Cont)) +); diff --git a/frontend/app/components/Assist/AssistRouter.tsx b/frontend/app/components/Assist/AssistRouter.tsx new file mode 100644 index 000000000..941362dbf --- /dev/null +++ b/frontend/app/components/Assist/AssistRouter.tsx @@ -0,0 +1,39 @@ +import React from 'react'; +import { Switch, Route } from 'react-router'; +import { withRouter, RouteComponentProps } from 'react-router-dom'; + +import { + assist, + recordings, + withSiteId, +} from 'App/routes'; +import AssistView from './AssistView' +import Recordings from './RecordingsList/Recordings' + +interface Props extends RouteComponentProps { + match: any; +} + +function AssistRouter(props: Props) { + const { + match: { + params: { siteId }, + }, + } = props; + + return ( +
+ + + + + + + + + +
+ ); +} + +export default withRouter(AssistRouter); diff --git a/frontend/app/components/Assist/AssistSearchField/AssistSearchField.tsx b/frontend/app/components/Assist/AssistSearchField/AssistSearchField.tsx index a8c04a98a..df53e4404 100644 --- a/frontend/app/components/Assist/AssistSearchField/AssistSearchField.tsx +++ b/frontend/app/components/Assist/AssistSearchField/AssistSearchField.tsx @@ -16,7 +16,7 @@ function AssistSearchField(props: Props) { const hasEvents = props.appliedFilter.filters.filter((i: any) => i.isEvent).size > 0; const hasFilters = props.appliedFilter.filters.filter((i: any) => !i.isEvent).size > 0; return ( -
+
diff --git a/frontend/app/components/Assist/AssistView.tsx b/frontend/app/components/Assist/AssistView.tsx new file mode 100644 index 000000000..36e7b7137 --- /dev/null +++ b/frontend/app/components/Assist/AssistView.tsx @@ -0,0 +1,17 @@ +import React from 'react'; +import LiveSessionList from 'Shared/LiveSessionList'; +import LiveSessionSearch from 'Shared/LiveSessionSearch'; +import AssistSearchField from './AssistSearchField'; + +function AssistView() { + return ( +
+ + +
+ +
+ ) +} + +export default AssistView; diff --git a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx index 167db8281..29e4826cc 100644 --- a/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx +++ b/frontend/app/components/Assist/ChatWindow/ChatWindow.tsx @@ -6,7 +6,7 @@ import stl from './chatWindow.module.css'; import ChatControls from '../ChatControls/ChatControls'; import Draggable from 'react-draggable'; import type { LocalStream } from 'Player'; -import { toggleVideoLocalStream } from 'Player' +import { PlayerContext } from 'App/components/Session/playerContext'; export interface Props { incomeStream: MediaStream[] | null; @@ -17,6 +17,10 @@ export interface Props { } function ChatWindow({ userId, incomeStream, localStream, endCall, isPrestart }: Props) { + const { player } = React.useContext(PlayerContext) + + const toggleVideoLocalStream = player.assistManager.toggleVideoLocalStream; + const [localVideoEnabled, setLocalVideoEnabled] = useState(false); const [anyRemoteEnabled, setRemoteEnabled] = useState(false); diff --git a/frontend/app/components/Assist/RecordingsList/EditRecordingModal.tsx b/frontend/app/components/Assist/RecordingsList/EditRecordingModal.tsx new file mode 100644 index 000000000..902fff4f9 --- /dev/null +++ b/frontend/app/components/Assist/RecordingsList/EditRecordingModal.tsx @@ -0,0 +1,103 @@ +import { useObserver } from 'mobx-react-lite'; +import React from 'react'; +import { Button, Modal, Form, Icon, Checkbox, Input } from 'UI'; + +interface Props { + show: boolean; + title: string; + closeHandler?: () => void; + onSave: (title: string) => void; +} +function EditRecordingModal(props: Props) { + const { show, closeHandler, title, onSave } = props; + const [text, setText] = React.useState(title) + + React.useEffect(() => { + const handleEsc = (e: any) => e.key === 'Escape' && closeHandler?.() + document.addEventListener("keydown", handleEsc, false); + return () => { 
+ document.removeEventListener("keydown", handleEsc, false); + } + }, []) + + const write = ({ target: { value, name } }: any) => setText(value) + + const save = () => { + onSave(text) + } + return useObserver(() => ( + + +
{ 'Edit Recording' }
+
+ +
+
+ + +
+ + + + + + {/* + + + + + +
+ dashboard.update({ 'isPublic': !dashboard.isPublic }) } + /> +
dashboard.update({ 'isPublic': !dashboard.isPublic }) }> + + Team can see and edit the dashboard. +
+
+
*/} +
+
+ +
+ + +
+
+
+ )); +} + +export default EditRecordingModal; diff --git a/frontend/app/components/Assist/RecordingsList/Recordings.tsx b/frontend/app/components/Assist/RecordingsList/Recordings.tsx new file mode 100644 index 000000000..c03b703f3 --- /dev/null +++ b/frontend/app/components/Assist/RecordingsList/Recordings.tsx @@ -0,0 +1,44 @@ +import React from 'react'; +import { PageTitle } from 'UI'; +import Select from 'Shared/Select'; +import RecordingsSearch from './RecordingsSearch'; +import RecordingsList from './RecordingsList'; +import { useStore } from 'App/mstore'; +import { connect } from 'react-redux'; + +function Recordings({ userId }: { userId: string }) { + const { recordingsStore } = useStore(); + + const recordingsOwner = [ + { value: '0', label: 'All Recordings' }, + { value: userId, label: 'My Recordings' }, + ]; + + return ( +
+
+
+ +
+
+ +
+ ); +} + +export default observer(RecordingsSearch); diff --git a/frontend/app/components/Assist/RecordingsList/RecordsListItem.tsx b/frontend/app/components/Assist/RecordingsList/RecordsListItem.tsx new file mode 100644 index 000000000..987ab0fa2 --- /dev/null +++ b/frontend/app/components/Assist/RecordingsList/RecordsListItem.tsx @@ -0,0 +1,108 @@ +import React from 'react'; +import { Icon, ItemMenu, Tooltip } from 'UI'; +import { durationFromMs, formatTimeOrDate } from 'App/date'; +import { IRecord } from 'App/services/RecordingsService'; +import { useStore } from 'App/mstore'; +import { toast } from 'react-toastify'; +import cn from 'classnames'; +import EditRecordingModal from './EditRecordingModal' + +interface Props { + record: IRecord; +} + +function RecordsListItem(props: Props) { + const { record } = props; + const { recordingsStore, settingsStore } = useStore(); + const { timezone } = settingsStore.sessionSettings; + const [isEdit, setEdit] = React.useState(false); + const [recordingTitle, setRecordingTitle] = React.useState(record.name); + const inputRef = React.useRef(null); + + const onRecordClick = () => { + recordingsStore.fetchRecordingUrl(record.recordId).then((url) => { + window.open(url, '_blank'); + }); + }; + + React.useEffect(() => { + if (inputRef.current) { + inputRef.current.style.width = `${record.name.length}ch`; + } + }, [isEdit, inputRef.current]); + + const onDelete = () => { + recordingsStore.deleteRecording(record.recordId).then(() => { + recordingsStore.setRecordings( + recordingsStore.recordings.filter((rec) => rec.recordId !== record.recordId) + ); + toast.success('Recording deleted'); + }); + }; + + const menuItems = [{ icon: 'pencil', text: 'Rename', onClick: () => setEdit(true) }, { icon: 'trash', text: 'Delete', onClick: onDelete }]; + + const onSave = (title: string) => { + recordingsStore + .updateRecordingName(record.recordId, title) + .then(() => { + setRecordingTitle(title) + toast.success('Recording name updated'); + 
}) + .catch(() => toast.error("Couldn't update recording name")); + setEdit(false); + }; + + return ( +
+ +
+
+
+
+ +
+
+
+ {recordingTitle} +
+
{durationFromMs(record.duration)}
+
+
+
+
+
+
{record.createdBy}
+
+ {formatTimeOrDate(record.createdAt, timezone, true)} +
+
+
+
+
+ + +
Play Video
+
+
+ +
+
+
+
+ ); +} + +export default RecordsListItem; diff --git a/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx b/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx index 6d702acdb..25283bd3b 100644 --- a/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx +++ b/frontend/app/components/Assist/RequestingWindow/RequestingWindow.tsx @@ -2,45 +2,78 @@ import React from 'react'; import { INDEXES } from 'App/constants/zindex'; import { connect } from 'react-redux'; import { Button, Loader, Icon } from 'UI'; -import { initiateCallEnd, releaseRemoteControl } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; interface Props { userDisplayName: string; - type: WindowType; + getWindowType: () => WindowType | null; } export enum WindowType { Call, Control, + Record, +} + +enum Actions { + CallEnd, + ControlEnd, + RecordingEnd, } const WIN_VARIANTS = { [WindowType.Call]: { text: 'to accept the call', icon: 'call' as const, - action: initiateCallEnd, + action: Actions.CallEnd, + iconColor: 'teal', }, [WindowType.Control]: { text: 'to accept remote control request', icon: 'remote-control' as const, - action: releaseRemoteControl, + action: Actions.ControlEnd, + iconColor: 'teal', }, + [WindowType.Record]: { + text: 'to accept recording request', + icon: 'record-circle' as const, + iconColor: 'red', + action: Actions.RecordingEnd, + } }; -function RequestingWindow({ userDisplayName, type }: Props) { +function RequestingWindow({ userDisplayName, getWindowType }: Props) { + const windowType = getWindowType() + if (!windowType) return; + const { player } = React.useContext(PlayerContext) + + + const { + assistManager: { + initiateCallEnd, + releaseRemoteControl, + stopRecording, + } + } = player + + const actions = { + [Actions.CallEnd]: initiateCallEnd, + [Actions.ControlEnd]: releaseRemoteControl, + [Actions.RecordingEnd]: stopRecording, + } return (
- +
Waiting for {userDisplayName}
- {WIN_VARIANTS[type].text} + {WIN_VARIANTS[windowType].text} -
@@ -48,6 +81,6 @@ function RequestingWindow({ userDisplayName, type }: Props) { ); } -export default connect((state) => ({ +export default connect((state: any) => ({ userDisplayName: state.getIn(['sessions', 'current', 'userDisplayName']), }))(RequestingWindow); diff --git a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx index e377cd3ba..a474c9cc8 100644 --- a/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx +++ b/frontend/app/components/Assist/components/AssistActions/AssistActions.tsx @@ -3,15 +3,8 @@ import { Button, Tooltip } from 'UI'; import { connect } from 'react-redux'; import cn from 'classnames'; import { toggleChatWindow } from 'Duck/sessions'; -import { connectPlayer } from 'Player'; import ChatWindow from '../../ChatWindow'; -import { - callPeer, - setCallArgs, - requestReleaseRemoteControl, - toggleAnnotation, - toggleUserName, -} from 'Player'; +// state enums import { CallingState, ConnectionStatus, @@ -19,9 +12,12 @@ import { RequestLocalStream, } from 'Player'; import type { LocalStream } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; import { toast } from 'react-toastify'; import { confirm } from 'UI'; import stl from './AassistActions.module.css'; +import ScreenRecorder from 'App/components/Session_/ScreenRecorder/ScreenRecorder'; function onReject() { toast.info(`Call was rejected.`); @@ -48,17 +44,31 @@ interface Props { function AssistActions({ userId, - calling, - annotating, - peerConnectionStatus, - remoteControlStatus, hasPermission, isEnterprise, isCallActive, agentIds, - livePlay, userDisplayName, }: Props) { + const { player, store } = React.useContext(PlayerContext) + + const { + assistManager: { + call: callPeer, + setCallArgs, + requestReleaseRemoteControl, + toggleAnnotation, + }, + toggleUserName, + } = player + const { + 
calling, + annotating, + peerConnectionStatus, + remoteControl: remoteControlStatus, + livePlay, + } = store.get() + const [isPrestart, setPrestart] = useState(false); const [incomeStream, setIncomeStream] = useState([]); const [localStream, setLocalStream] = useState(null); @@ -175,6 +185,10 @@ function AssistActions({ )} + {/* @ts-ignore wtf? */} + {isEnterprise ? : null} +
+ {/* @ts-ignore */}
{ + (state: any) => { const permissions = state.getIn(['user', 'account', 'permissions']) || []; return { hasPermission: permissions.includes('ASSIST_CALL'), @@ -248,11 +262,5 @@ const con = connect( ); export default con( - connectPlayer((state) => ({ - calling: state.calling, - annotating: state.annotating, - remoteControlStatus: state.remoteControl, - peerConnectionStatus: state.peerConnectionStatus, - livePlay: state.livePlay, - }))(AssistActions) + observer(AssistActions) ); diff --git a/frontend/app/components/Assist/components/SessionList/SessionList.tsx b/frontend/app/components/Assist/components/SessionList/SessionList.tsx index 6de55c0d1..ad4eb4df5 100644 --- a/frontend/app/components/Assist/components/SessionList/SessionList.tsx +++ b/frontend/app/components/Assist/components/SessionList/SessionList.tsx @@ -56,7 +56,7 @@ function SessionList(props: Props) { {session.pageTitle}
)} - hideModal()} key={session.sessionId} session={session} /> + hideModal()} key={session.sessionId} session={session} />
))}
diff --git a/frontend/app/components/Client/Integrations/Integrations.tsx b/frontend/app/components/Client/Integrations/Integrations.tsx index 8080213b1..73572c9b0 100644 --- a/frontend/app/components/Client/Integrations/Integrations.tsx +++ b/frontend/app/components/Client/Integrations/Integrations.tsx @@ -26,11 +26,12 @@ import FetchDoc from './FetchDoc'; import ProfilerDoc from './ProfilerDoc'; import AxiosDoc from './AxiosDoc'; import AssistDoc from './AssistDoc'; -import { PageTitle, Loader } from 'UI'; +import { PageTitle } from 'UI'; import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; import withPageTitle from 'HOCs/withPageTitle'; import PiniaDoc from './PiniaDoc' import ZustandDoc from './ZustandDoc' +import MSTeams from './Teams' interface Props { fetch: (name: string, siteId: string) => void; @@ -133,6 +134,7 @@ const integrations = [ { title: 'Jira', slug: 'jira', category: 'Errors', icon: 'integrations/jira', component: }, { title: 'Github', slug: 'github', category: 'Errors', icon: 'integrations/github', component: }, { title: 'Slack', category: 'Errors', icon: 'integrations/slack', component: }, + { title: 'MS Teams', category: 'Errors', icon: 'integrations/teams', component: }, ], }, { diff --git a/frontend/app/components/Client/Integrations/Teams/TeamsAddForm.tsx b/frontend/app/components/Client/Integrations/Teams/TeamsAddForm.tsx new file mode 100644 index 000000000..04b8e9451 --- /dev/null +++ b/frontend/app/components/Client/Integrations/Teams/TeamsAddForm.tsx @@ -0,0 +1,114 @@ +import React from 'react'; +import { connect } from 'react-redux'; +import { edit, save, init, update, remove } from 'Duck/integrations/teams'; +import { Form, Input, Button, Message } from 'UI'; +import { confirm } from 'UI'; + +interface Props { + edit: (inst: any) => void; + save: (inst: any) => void; + init: (inst: any) => void; + update: (inst: any) => void; + remove: (id: string) => void; + onClose: () => void; + instance: any; + saving: boolean; + 
errors: any; +} + +class TeamsAddForm extends React.PureComponent { + componentWillUnmount() { + this.props.init({}); + } + + save = () => { + const instance = this.props.instance; + if (instance.exists()) { + this.props.update(this.props.instance); + } else { + this.props.save(this.props.instance); + } + }; + + remove = async (id: string) => { + if ( + await confirm({ + header: 'Confirm', + confirmButton: 'Yes, delete', + confirmation: `Are you sure you want to permanently delete this channel?`, + }) + ) { + this.props.remove(id); + } + }; + + write = ({ target: { name, value } }: { target: { name: string, value: string }}) => this.props.edit({ [name]: value }); + + render() { + const { instance, saving, errors, onClose } = this.props; + return ( +
+
+ + + + + + + + +
+
+ + + +
+ + +
+
+ + {errors && ( +
+ {errors.map((error: any) => ( + + {error} + + ))} +
+ )} +
+ ); + } +} + +export default connect( + (state: any) => ({ + instance: state.getIn(['teams', 'instance']), + saving: state.getIn(['teams', 'saveRequest', 'loading']), + errors: state.getIn(['teams', 'saveRequest', 'errors']), + }), + { edit, save, init, remove, update } +)(TeamsAddForm); diff --git a/frontend/app/components/Client/Integrations/Teams/TeamsChannelList.tsx b/frontend/app/components/Client/Integrations/Teams/TeamsChannelList.tsx new file mode 100644 index 000000000..9c5189705 --- /dev/null +++ b/frontend/app/components/Client/Integrations/Teams/TeamsChannelList.tsx @@ -0,0 +1,51 @@ +import React from 'react'; +import { connect } from 'react-redux'; +import { NoContent } from 'UI'; +import { remove, edit, init } from 'Duck/integrations/teams'; +import DocLink from 'Shared/DocLink/DocLink'; + +function TeamsChannelList(props: { list: any, edit: (inst: any) => any, onEdit: () => void }) { + const { list } = props; + + const onEdit = (instance: Record) => { + props.edit(instance); + props.onEdit(); + }; + + return ( +
+ +
+ Integrate MS Teams with OpenReplay and share insights with the rest of the team, directly from the recording page. +
+ +
+ } + size="small" + show={list.size === 0} + > + {list.map((c: any) => ( +
onEdit(c)} + > +
+
{c.name}
+
{c.endpoint}
+
+
+ ))} + +
+ ); +} + +export default connect( + (state: any) => ({ + list: state.getIn(['teams', 'list']), + }), + { remove, edit, init } +)(TeamsChannelList); diff --git a/frontend/app/components/Client/Integrations/Teams/index.tsx b/frontend/app/components/Client/Integrations/Teams/index.tsx new file mode 100644 index 000000000..4814697b8 --- /dev/null +++ b/frontend/app/components/Client/Integrations/Teams/index.tsx @@ -0,0 +1,55 @@ +import React, { useEffect } from 'react'; +import TeamsChannelList from './TeamsChannelList'; +import { fetchList, init } from 'Duck/integrations/teams'; +import { connect } from 'react-redux'; +import TeamsAddForm from './TeamsAddForm'; +import { Button } from 'UI'; + +interface Props { + onEdit?: (integration: any) => void; + istance: any; + fetchList: any; + init: any; +} +const MSTeams = (props: Props) => { + const [active, setActive] = React.useState(false); + + const onEdit = () => { + setActive(true); + }; + + const onNew = () => { + setActive(true); + props.init({}); + } + + useEffect(() => { + props.fetchList(); + }, []); + + return ( +
+ {active && ( +
+ setActive(false)} /> +
+ )} +
+
+

Microsoft Teams

+
+ +
+
+ ); +}; + +MSTeams.displayName = 'MSTeams'; + +export default connect( + (state: any) => ({ + istance: state.getIn(['teams', 'instance']), + }), + { fetchList, init } +)(MSTeams); diff --git a/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx b/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx index abae1cbe1..72b99929d 100644 --- a/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx +++ b/frontend/app/components/Client/Users/components/UserListItem/UserListItem.tsx @@ -42,7 +42,7 @@ function UserListItem(props: Props) { onClick={editHandler} >
- {user.name} + {user.name} {/* {isEnterprise && } */}
diff --git a/frontend/app/components/Client/Webhooks/Webhooks.js b/frontend/app/components/Client/Webhooks/Webhooks.js index e005a893a..a87ac2298 100644 --- a/frontend/app/components/Client/Webhooks/Webhooks.js +++ b/frontend/app/components/Client/Webhooks/Webhooks.js @@ -16,7 +16,7 @@ function Webhooks(props) { const { webhooks, loading } = props; const { showModal, hideModal } = useModal(); - const noSlackWebhooks = webhooks.filter((hook) => hook.type !== 'slack'); + const noSlackWebhooks = webhooks.filter((hook) => hook.type === 'webhook'); useEffect(() => { props.fetchList(); }, []); diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertForm/NotifyHooks.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertForm/NotifyHooks.tsx index 921c7ba9b..15f00a0a3 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertForm/NotifyHooks.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertForm/NotifyHooks.tsx @@ -6,6 +6,7 @@ interface INotifyHooks { instance: Alert; onChangeCheck: (e: React.ChangeEvent) => void; slackChannels: Array; + msTeamsChannels: Array; validateEmail: (value: string) => boolean; edit: (data: any) => void; hooks: Array; @@ -16,20 +17,33 @@ function NotifyHooks({ onChangeCheck, slackChannels, validateEmail, + msTeamsChannels, hooks, edit, }: INotifyHooks) { return (
- + {slackChannels.length > 0 && ( + + )} + {msTeamsChannels.length > 0 && ( + + )} - +
)} + {instance.msteams && ( +
+ +
+ edit({ msteamsInput: selected })} + /> +
+
+ )} + {instance.email && (
- +
- +
, webhooks: Array) => { .map((channelId: number) => { return ( '#' + - webhooks.find((hook) => hook.webhookId === channelId && hook.type === 'slack').name + webhooks.find((hook) => hook.webhookId === channelId && hook.type === 'slack')?.name + ); + }) + .join(', ') + + ')' + ); + }; + const getMsTeamsChannels = () => { + return ( + ' (' + + alert.msteamsInput + .map((channelId: number) => { + return ( + webhooks.find((hook) => hook.webhookId === channelId && hook.type === 'msteams')?.name ); }) .join(', ') + @@ -39,7 +52,15 @@ const getNotifyChannel = (alert: Record, webhooks: Array) => { let str = ''; if (alert.slack) { str = 'Slack'; - str += alert.slackInput.length > 0 ? getSlackChannels() : ''; + if (alert.slackInput.length > 0) { + str += getSlackChannels(); + } + } + if (alert.msteams) { + str += (str === '' ? '' : ' and ') + 'MS Teams' + if (alert.msteamsInput.length > 0) { + str += getMsTeamsChannels(); + } } if (alert.email) { str += (str === '' ? '' : ' and ') + (alert.emailInput.length > 1 ? 'Emails' : 'Email'); diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index 6027646f7..335bf8295 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -5,7 +5,7 @@ import { validateEmail } from 'App/validate'; import { fetchTriggerOptions, init, edit, save, remove, fetchList } from 'Duck/alerts'; import { confirm } from 'UI'; import { toast } from 'react-toastify'; -import { SLACK, WEBHOOK } from 'App/constants/schedule'; +import { SLACK, WEBHOOK, TEAMS } from 'App/constants/schedule'; import { fetchList as fetchWebhooks } from 'Duck/webhook'; import Breadcrumb from 'Shared/Breadcrumb'; import { withSiteId, alerts } from 'App/routes'; @@ -48,6 +48,11 @@ const Section = ({ index, title, description, content }: ISection) => (
); +interface Select { + label: string; + value: string | number +} + interface IProps extends RouteComponentProps { siteId: string; instance: Alert; @@ -143,17 +148,22 @@ const NewAlert = (props: IProps) => { }); }; - const slackChannels = webhooks - .filter((hook) => hook.type === SLACK) - .map(({ webhookId, name }) => ({ value: webhookId, label: name })) - // @ts-ignore - .toJS(); + const slackChannels: Select[] = [] + const hooks: Select[] = [] + const msTeamsChannels: Select[] = [] - const hooks = webhooks - .filter((hook) => hook.type === WEBHOOK) - .map(({ webhookId, name }) => ({ value: webhookId, label: name })) - // @ts-ignore - .toJS(); + webhooks.forEach((hook) => { + const option = { value: hook.webhookId, label: hook.name } + if (hook.type === SLACK) { + slackChannels.push(option) + } + if (hook.type === WEBHOOK) { + hooks.push(option) + } + if (hook.type === TEAMS) { + msTeamsChannels.push(option) + } + }) const writeQueryOption = ( e: React.ChangeEvent, @@ -253,6 +263,7 @@ const NewAlert = (props: IProps) => { instance={instance} onChangeCheck={onChangeCheck} slackChannels={slackChannels} + msTeamsChannels={msTeamsChannels} validateEmail={validateEmail} hooks={hooks} edit={edit} diff --git a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx index e610d040c..e1653c947 100644 --- a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx +++ b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx @@ -125,7 +125,7 @@ function DashboardView(props: Props) { + {dashboard?.name} } diff --git a/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx index 3f352165a..a560235e9 100644 --- a/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx +++ 
b/frontend/app/components/Dashboard/components/WidgetName/WidgetName.tsx @@ -64,12 +64,12 @@ function WidgetName(props: Props) { /> ) : ( // @ts-ignore - -
setEditing(true)} + +
setEditing(true)} className={ cn( - "text-2xl h-8 flex items-center border-transparent", + "text-2xl h-8 flex items-center border-transparent", canEdit && 'cursor-pointer select-none border-b border-b-borderColor-transparent hover:border-dotted hover:border-gray-medium' ) } @@ -77,7 +77,7 @@ function WidgetName(props: Props) { { name }
- + )} { canEdit &&
setEditing(true)}>
}
diff --git a/frontend/app/components/Errors/List/List.js b/frontend/app/components/Errors/List/List.js index 9f379319a..8a22a1f2d 100644 --- a/frontend/app/components/Errors/List/List.js +++ b/frontend/app/components/Errors/List/List.js @@ -25,7 +25,7 @@ const sortOptions = Object.entries(sortOptionsMap) @connect(state => ({ loading: state.getIn([ "errors", "loading" ]), - resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) || + resolveToggleLoading: state.getIn(["errors", "resolve", "loading"]) || state.getIn(["errors", "unresolve", "loading"]), ignoreLoading: state.getIn([ "errors", "ignore", "loading" ]), mergeLoading: state.getIn([ "errors", "merge", "loading" ]), @@ -54,19 +54,19 @@ export default class List extends React.PureComponent { } this.debounceFetch = debounce(this.props.editOptions, 1000); } - + componentDidMount() { this.props.applyFilter({ }); } check = ({ errorId }) => { const { checkedIds } = this.state; - const newCheckedIds = checkedIds.contains(errorId) - ? checkedIds.remove(errorId) + const newCheckedIds = checkedIds.contains(errorId) + ? checkedIds.remove(errorId) : checkedIds.add(errorId); this.setState({ checkedAll: newCheckedIds.size === this.props.list.size, - checkedIds: newCheckedIds + checkedIds: newCheckedIds }); } @@ -184,7 +184,7 @@ export default class List extends React.PureComponent { onClick={ this.unresolve } disabled={ someLoading || currentCheckedIds.size === 0} /> - } + } { status !== IGNORED && - } + }
- Sort By + Sort By - - - - - No Data -
- } - size="small" - show={filtered.length === 0} - > - - {filtered.map((l) => ( - - ))} - - - - - - ); - } -} diff --git a/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx b/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx index 85457d6b1..4c0096697 100644 --- a/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx +++ b/frontend/app/components/Session_/Console/ConsoleRow/ConsoleRow.tsx @@ -41,7 +41,7 @@ function ConsoleRow(props: Props) { )} {renderWithNL(lines.pop())}
- {canExpand && expanded && lines.map((l: any, i: number) =>
{l}
)} + {/* {canExpand && expanded && lines.map((l: any, i: number) =>
{l}
)} */}
jump(log.time)} />
diff --git a/frontend/app/components/Session_/Console/ConsoleRow/index.ts b/frontend/app/components/Session_/Console/ConsoleRow/index.ts deleted file mode 100644 index c9140d748..000000000 --- a/frontend/app/components/Session_/Console/ConsoleRow/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { default } from './ConsoleRow'; diff --git a/frontend/app/components/Session_/Console/console.module.css b/frontend/app/components/Session_/Console/console.module.css deleted file mode 100644 index 2da78f540..000000000 --- a/frontend/app/components/Session_/Console/console.module.css +++ /dev/null @@ -1,38 +0,0 @@ - -.message { - overflow-x: auto; - margin-left: 10px; - font-size: 13px; - overflow-x: auto; - &::-webkit-scrollbar { - height: 2px; - } -} - -.line { - font-family: 'Menlo', 'monaco', 'consolas', monospace; - /* margin-top: -1px; ??? */ - display: flex; - align-items: flex-start; - border-bottom: solid thin $gray-light-shade; - &:hover { - background-coor: $active-blue !important; - } -} - -.timestamp { - -} - -.activeRow { - background-color: $teal-light !important; -} - -.icon { - padding-top: 4px; - margin-right: 7px; -} - -.inactiveRow { - opacity: 0.5; -} \ No newline at end of file diff --git a/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js b/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js index f0c7efe06..3c3396e72 100644 --- a/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js +++ b/frontend/app/components/Session_/EventsBlock/EventSearch/EventSearch.js @@ -1,9 +1,13 @@ import React, { useEffect } from 'react' import { Input, Icon } from 'UI' -import { connectPlayer, toggleEvents, scale } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; function EventSearch(props) { - const { onChange, clearSearch, value, header, toggleEvents, setActiveTab } = props; + const { player } = React.useContext(PlayerContext) + + const { onChange, clearSearch, value, header, 
setActiveTab } = props; + + const toggleEvents = () => player.toggleEvents() useEffect(() => { return () => { @@ -42,4 +46,4 @@ function EventSearch(props) { ) } -export default connectPlayer(() => ({}), { toggleEvents })(EventSearch) +export default EventSearch diff --git a/frontend/app/components/Session_/EventsBlock/EventsBlock.js b/frontend/app/components/Session_/EventsBlock/EventsBlock.js index 69ba51996..b3d978347 100644 --- a/frontend/app/components/Session_/EventsBlock/EventsBlock.js +++ b/frontend/app/components/Session_/EventsBlock/EventsBlock.js @@ -141,6 +141,7 @@ export default class EventsBlock extends React.Component { eventsIndex, filterOutNote, } = this.props; + const { query } = this.state; const _events = this.eventsList const isLastEvent = index === _events.size - 1; diff --git a/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx b/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx index 08e5631a9..676b1f901 100644 --- a/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx +++ b/frontend/app/components/Session_/EventsBlock/NoteEvent.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { Icon } from 'UI'; -import { tagProps, iTag, Note } from 'App/services/NotesService'; +import { tagProps, Note } from 'App/services/NotesService'; import { formatTimeOrDate } from 'App/date'; import { useStore } from 'App/mstore'; import { observer } from 'mobx-react-lite'; @@ -9,7 +9,6 @@ import copy from 'copy-to-clipboard'; import { toast } from 'react-toastify'; import { session } from 'App/routes'; import { confirm } from 'UI'; -import { filterOutNote as filterOutTimelineNote } from 'Player'; import { TeamBadge } from 'Shared/SessionListContainer/components/Notes'; interface Props { @@ -24,7 +23,6 @@ function NoteEvent(props: Props) { const { settingsStore, notesStore } = useStore(); const { timezone } = settingsStore.sessionSettings; - console.log(props.noEdit); const onEdit = () => { props.onEdit({ isVisible: true, @@ -60,7 +58,6 @@ 
function NoteEvent(props: Props) { ) { notesStore.deleteNote(props.note.noteId).then((r) => { props.filterOutNote(props.note.noteId); - filterOutTimelineNote(props.note.noteId); toast.success('Note deleted'); }); } diff --git a/frontend/app/components/Session_/Exceptions/Exceptions.js b/frontend/app/components/Session_/Exceptions/Exceptions.js deleted file mode 100644 index 334e57688..000000000 --- a/frontend/app/components/Session_/Exceptions/Exceptions.js +++ /dev/null @@ -1,159 +0,0 @@ -import React from 'react'; -import { connect } from 'react-redux'; -import { getRE } from 'App/utils'; -import { - NoContent, - Loader, - Input, - ErrorItem, - SlideModal, - ErrorDetails, - ErrorHeader, - Link, - QuestionMarkHint, - Tabs, -} from 'UI'; -import { fetchErrorStackList } from 'Duck/sessions'; -import { connectPlayer, jump } from 'Player'; -import { error as errorRoute } from 'App/routes'; -import Autoscroll from '../Autoscroll'; -import BottomBlock from '../BottomBlock'; - -@connectPlayer((state) => ({ - logs: state.logListNow, - exceptions: state.exceptionsList, - // exceptionsNow: state.exceptionsListNow, -})) -@connect( - (state) => ({ - session: state.getIn(['sessions', 'current']), - errorStack: state.getIn(['sessions', 'errorStack']), - sourcemapUploaded: state.getIn(['sessions', 'sourcemapUploaded']), - loading: state.getIn(['sessions', 'fetchErrorStackList', 'loading']), - }), - { fetchErrorStackList } -) -export default class Exceptions extends React.PureComponent { - state = { - filter: '', - currentError: null, - }; - - onFilterChange = ({ target: { value } }) => this.setState({ filter: value }); - - setCurrentError = (err) => { - const { session } = this.props; - this.props.fetchErrorStackList(session.sessionId, err.errorId); - this.setState({ currentError: err }); - }; - closeModal = () => this.setState({ currentError: null }); - - render() { - const { exceptions, loading, errorStack, sourcemapUploaded } = this.props; - const { filter, currentError } = 
this.state; - const filterRE = getRE(filter, 'i'); - - const filtered = exceptions.filter((e) => filterRE.test(e.name) || filterRE.test(e.message)); - - // let lastIndex = -1; - // filtered.forEach((item, index) => { - // if ( - // this.props.exceptionsNow.length > 0 && - // item.time <= this.props.exceptionsNow[this.props.exceptionsNow.length - 1].time - // ) { - // lastIndex = index; - // } - // }); - - return ( - <> - -
- - {currentError.name} - - {currentError.function} -
-
{currentError.message}
-
- ) - } - isDisplayed={currentError != null} - content={ - currentError && ( -
- - - - - -
- ) - } - onClose={this.closeModal} - /> - - -
- Exceptions -
- -
- - - - Upload Source Maps{' '} - - and see source code context obtained from stack traces in their original form. - - } - /> -
-
- - - - {filtered.map((e, index) => ( - jump(e.time)} - error={e} - key={e.key} - // selected={lastIndex === index} - // inactive={index > lastIndex} - onErrorClick={(jsEvent) => { - jsEvent.stopPropagation(); - jsEvent.preventDefault(); - this.setCurrentError(e); - }} - /> - ))} - - - -
- - ); - } -} diff --git a/frontend/app/components/Session_/Exceptions/Exceptions.tsx b/frontend/app/components/Session_/Exceptions/Exceptions.tsx new file mode 100644 index 000000000..987d0f215 --- /dev/null +++ b/frontend/app/components/Session_/Exceptions/Exceptions.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import { connect } from 'react-redux'; +import { getRE } from 'App/utils'; +import { + NoContent, + Loader, + Input, + ErrorItem, + SlideModal, + ErrorDetails, + Link, + QuestionMarkHint, +} from 'UI'; +import { error as errorRoute } from 'App/routes'; +import Autoscroll from '../Autoscroll'; +import BottomBlock from '../BottomBlock'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; + +interface IProps { + loading: boolean; + sourcemapUploaded: boolean; + errorStack: Record; +} + +function Exceptions({ errorStack, sourcemapUploaded, loading }: IProps) { + const { player, store } = React.useContext(PlayerContext); + const { logListNow: logs, exceptionsList: exceptions } = store.get(); + const [filter, setFilter] = React.useState(''); + const [currentError, setCurrentErrorVal] = React.useState(null); + + const onFilterChange = ({ target: { value } }: any) => setFilter(value); + const closeModal = () => setCurrentErrorVal(null); + + const filterRE = getRE(filter, 'i'); + const filtered = exceptions.filter((e: any) => filterRE.test(e.name) || filterRE.test(e.message)); + + return ( + <> + +
+ + {currentError.name} + + {currentError.function} +
+
{currentError.message}
+
+ ) + } + isDisplayed={currentError != null} + content={ + currentError && ( +
+ + + + + +
+ ) + } + onClose={closeModal} + /> + + +
+ Exceptions +
+ +
+ + + + Upload Source Maps{' '} + + and see source code context obtained from stack traces in their original form. + + } + /> +
+
+ + + + {filtered.map((e: any, index) => ( + + player.jump(e.time)} error={e} /> + + ))} + + + +
+ + ); +} + +export default connect((state: any) => ({ + errorStack: state.getIn(['sessions', 'errorStack']), + sourcemapUploaded: state.getIn(['sessions', 'sourcemapUploaded']), + loading: state.getIn(['sessions', 'fetchErrorStackList', 'loading']), +}))(observer(Exceptions)); diff --git a/frontend/app/components/Session_/Fetch/Fetch.js b/frontend/app/components/Session_/Fetch/Fetch.DEPRECATED.js similarity index 100% rename from frontend/app/components/Session_/Fetch/Fetch.js rename to frontend/app/components/Session_/Fetch/Fetch.DEPRECATED.js diff --git a/frontend/app/components/Session_/GraphQL/GQLDetails.js b/frontend/app/components/Session_/GraphQL/GQLDetails.js index 4caba50a7..73eadb3ab 100644 --- a/frontend/app/components/Session_/GraphQL/GQLDetails.js +++ b/frontend/app/components/Session_/GraphQL/GQLDetails.js @@ -6,8 +6,6 @@ export default class GQLDetails extends React.PureComponent { render() { const { gql: { variables, response, duration, operationKind, operationName }, - nextClick, - prevClick, first = false, last = false, } = this.props; @@ -57,15 +55,6 @@ export default class GQLDetails extends React.PureComponent {
- -
- - -
); } diff --git a/frontend/app/components/Session_/GraphQL/GraphQL.js b/frontend/app/components/Session_/GraphQL/GraphQL.js deleted file mode 100644 index 8c601dba1..000000000 --- a/frontend/app/components/Session_/GraphQL/GraphQL.js +++ /dev/null @@ -1,178 +0,0 @@ -import React from 'react'; -import { NoContent, Input, SlideModal, CloseButton, Button } from 'UI'; -import { getRE } from 'App/utils'; -import { connectPlayer, pause, jump } from 'Player'; -import BottomBlock from '../BottomBlock'; -import TimeTable from '../TimeTable'; -import GQLDetails from './GQLDetails'; -import { renderStart } from 'Components/Session_/Network/NetworkContent'; - -function renderDefaultStatus() { - return '2xx-3xx'; -} - -export function renderName(r) { - return ( -
-
{r.operationName}
- -
- ); -} - -@connectPlayer((state) => ({ - list: state.graphqlList, - listNow: state.graphqlListNow, - time: state.time, - livePlay: state.livePlay, -})) -export default class GraphQL extends React.PureComponent { - state = { - filter: '', - filteredList: this.props.list, - filteredListNow: this.props.listNow, - current: null, - currentIndex: 0, - showFetchDetails: false, - hasNextError: false, - hasPreviousError: false, - lastActiveItem: 0, - }; - - static filterList(list, value) { - const filterRE = getRE(value, 'i'); - - return value - ? list.filter( - (r) => - filterRE.test(r.operationKind) || - filterRE.test(r.operationName) || - filterRE.test(r.variables) - ) - : list; - } - - onFilterChange = ({ target: { value } }) => { - const { list } = this.props; - const filtered = GraphQL.filterList(list, value); - this.setState({ filter: value, filteredList: filtered, currentIndex: 0 }); - }; - - setCurrent = (item, index) => { - if (!this.props.livePlay) { - pause(); - jump(item.time); - } - this.setState({ current: item, currentIndex: index }); - }; - - closeModal = () => this.setState({ current: null, showFetchDetails: false }); - - static getDerivedStateFromProps(nextProps, prevState) { - const { list } = nextProps; - if (nextProps.time) { - const filtered = GraphQL.filterList(list, prevState.filter); - let i = 0; - filtered.forEach((item, index) => { - if (item.time <= nextProps.time) { - i = index; - } - }); - - return { - lastActiveItem: i, - }; - } - } - - render() { - const { current, currentIndex, filteredList, lastActiveItem } = this.state; - - return ( - - -

GraphQL

-
- -
-
- } - isDisplayed={current != null} - content={ - current && ( - - ) - } - onClose={this.closeModal} - /> - - - GraphQL -
- -
-
- - - - {[ - { - label: 'Start', - width: 90, - render: renderStart, - }, - { - label: 'Status', - width: 70, - render: renderDefaultStatus, - }, - { - label: 'Type', - dataKey: 'operationKind', - width: 60, - }, - { - label: 'Name', - width: 240, - render: renderName, - }, - ]} - - - -
- - ); - } -} diff --git a/frontend/app/components/Session_/GraphQL/GraphQL.tsx b/frontend/app/components/Session_/GraphQL/GraphQL.tsx new file mode 100644 index 000000000..6f89a5ec1 --- /dev/null +++ b/frontend/app/components/Session_/GraphQL/GraphQL.tsx @@ -0,0 +1,174 @@ +import React, { useEffect } from 'react'; +import { NoContent, Input, SlideModal, CloseButton, Button } from 'UI'; +import { getRE } from 'App/utils'; +import BottomBlock from '../BottomBlock'; +import TimeTable from '../TimeTable'; +import GQLDetails from './GQLDetails'; +import { renderStart } from 'Components/Session_/Network/NetworkContent'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; + +function renderDefaultStatus() { + return '2xx-3xx'; +} + +export function renderName(r: Record) { + const { player } = React.useContext(PlayerContext); + + return ( +
+
{r.operationName}
+ +
+ ); +} + +function GraphQL() { + const { player, store } = React.useContext(PlayerContext); + + const { graphqlList: list, graphqlListNow: listNow, time, livePlay } = store.get(); + + const defaultState = { + filter: '', + filteredList: list, + filteredListNow: listNow, + // @ts-ignore + current: null, + currentIndex: 0, + showFetchDetails: false, + hasNextError: false, + hasPreviousError: false, + lastActiveItem: 0, + }; + + const [state, setState] = React.useState(defaultState); + + const filterList = (list: any, value: string) => { + const filterRE = getRE(value, 'i'); + + return value + ? list.filter( + (r: any) => + filterRE.test(r.operationKind) || + filterRE.test(r.operationName) || + filterRE.test(r.variables) + ) + : list; + }; + + const onFilterChange = ({ target: { value } }: React.ChangeEvent) => { + const filtered = filterList(list, value); + setState((prevState) => ({ + ...prevState, + filter: value, + filteredList: filtered, + currentIndex: 0, + })); + }; + + const setCurrent = (item: any, index: number) => { + if (!livePlay) { + player.pause(); + player.jump(item.time); + } + setState((prevState) => ({ ...prevState, current: item, currentIndex: index })); + }; + + const closeModal = () => + setState((prevState) => ({ ...prevState, current: null, showFetchDetails: false })); + + useEffect(() => { + const filtered = filterList(listNow, state.filter); + if (filtered.length !== lastActiveItem) { + setState((prevState) => ({ ...prevState, lastActiveItem: listNow.length })); + } + }, [time]); + + const { current, currentIndex, filteredList, lastActiveItem } = state; + + return ( + + +

GraphQL

+
+ +
+
+ } + isDisplayed={current != null} + content={ + current && ( + + ) + } + onClose={closeModal} + /> + + + GraphQL +
+ +
+
+ + + + {[ + { + label: 'Start', + width: 90, + render: renderStart, + }, + { + label: 'Status', + width: 70, + render: renderDefaultStatus, + }, + { + label: 'Type', + dataKey: 'operationKind', + width: 60, + }, + { + label: 'Name', + width: 240, + render: renderName, + }, + ]} + + + +
+ + ); +} + +export default observer(GraphQL); diff --git a/frontend/app/components/Session_/Inspector/index.js b/frontend/app/components/Session_/Inspector/index.js index f76834fee..1f13b847f 100644 --- a/frontend/app/components/Session_/Inspector/index.js +++ b/frontend/app/components/Session_/Inspector/index.js @@ -1,27 +1,27 @@ import React, { useEffect, useState, useRef } from 'react'; -import { toggleInspectorMode, markElement } from 'Player'; import ElementView from './ElementView'; import BottomBlock from '../BottomBlock'; -import stl from './inspector.module.css' +import stl from './inspector.module.css'; +import { PlayerContext } from 'App/components/Session/playerContext'; // TODO: refactor: use Layout from the Sessions and put everything there under the WebPlayer folder -// function onMount(element, setOpen) { // TODO: through the MobX -// element.setOpen = setOpen; -// } +export default function Inspector() { + const { player } = React.useContext(PlayerContext); + const toggleInspectorMode = player.toggleInspectorMode; + const markElement = player.mark; -export default function Inspector () { const [doc, setDoc] = useState(null); const [openChain, setOpenChain] = useState([]); const [selectedElement, _setSelectedElement] = useState(null); const selectedElementRef = useRef(selectedElement); - const setSelectedElement = elem => { + const setSelectedElement = (elem) => { selectedElementRef.current = elem; _setSelectedElement(elem); - } + }; - useEffect(() => { + useEffect(() => { const doc = toggleInspectorMode(true, ({ target }) => { const openChain = []; let currentTarget = target; @@ -33,9 +33,9 @@ export default function Inspector () { setSelectedElement(target); }); setDoc(doc); - setOpenChain([ doc.documentElement ]); + setOpenChain([doc.documentElement]); - const onKeyPress = e => { + const onKeyPress = (e) => { if (e.key === 'Backspace' || e.key === 'Delete') { const elem = selectedElementRef.current; if (elem !== null && elem.parentElement !== 
null) { @@ -43,30 +43,30 @@ export default function Inspector () { setSelectedElement(null); } } - } - window.addEventListener("keydown", onKeyPress); + }; + window.addEventListener('keydown', onKeyPress); return () => { toggleInspectorMode(false); - window.removeEventListener("keydown", onKeyPress); - } + window.removeEventListener('keydown', onKeyPress); + }; }, []); - if (!doc) return null; - return ( - + if (!doc) return null; + return ( + -
markElement(null) } className={stl.wrapper}> - markElement(null)} className={stl.wrapper}> + markElement(e)} />
- ); -} \ No newline at end of file + ); +} diff --git a/frontend/app/components/Session_/LongTasks/LongTasks.js b/frontend/app/components/Session_/LongTasks/LongTasks.DEPRECATED.js similarity index 95% rename from frontend/app/components/Session_/LongTasks/LongTasks.js rename to frontend/app/components/Session_/LongTasks/LongTasks.DEPRECATED.js index fd3b4cc17..2fa80fd01 100644 --- a/frontend/app/components/Session_/LongTasks/LongTasks.js +++ b/frontend/app/components/Session_/LongTasks/LongTasks.DEPRECATED.js @@ -40,12 +40,12 @@ export default class GraphQL extends React.PureComponent { const { filter, current } = this.state; const filterRE = getRE(filter, 'i'); const filtered = list - .filter(({ containerType, context, containerName = "", containerId = "", containerSrc="" }) => - filterRE.test(containerName) || + .filter(({ containerType, context, containerName = "", containerId = "", containerSrc="" }) => + filterRE.test(containerName) || filterRE.test(containerId) || filterRE.test(containerSrc) || filterRE.test(CONTEXTS[ context ]) || - filterRE.test(CONTAINER_TYPES[ containerType ])); + filterRE.test(CONTAINER_TYPES[ containerType ])); const lastIndex = filtered.filter(item => item.time <= time).length - 1; return ( @@ -64,7 +64,7 @@ export default class GraphQL extends React.PureComponent { - Learn more + Learn more about Long Tasks API } diff --git a/frontend/app/components/Session_/LongTasks/index.js b/frontend/app/components/Session_/LongTasks/index.js index 521f0e2a7..ace8262a0 100644 --- a/frontend/app/components/Session_/LongTasks/index.js +++ b/frontend/app/components/Session_/LongTasks/index.js @@ -1 +1 @@ -export { default } from './LongTasks'; +export { default } from './LongTasks.DEPRECATED'; diff --git a/frontend/app/components/Session_/Multiview/EmptyTile.tsx b/frontend/app/components/Session_/Multiview/EmptyTile.tsx new file mode 100644 index 000000000..e92657603 --- /dev/null +++ b/frontend/app/components/Session_/Multiview/EmptyTile.tsx @@ 
-0,0 +1,16 @@ +import React from 'react' +import { InactiveTab } from 'App/components/Session_/Player/Controls/AssistSessionsTabs'; + +function EmptyTile({ onClick }: { onClick: () => void }) { + return ( +
+ + Add Session +
+ ); +} + +export default EmptyTile; diff --git a/frontend/app/components/Session_/Multiview/Multiview.tsx b/frontend/app/components/Session_/Multiview/Multiview.tsx new file mode 100644 index 000000000..420df0602 --- /dev/null +++ b/frontend/app/components/Session_/Multiview/Multiview.tsx @@ -0,0 +1,137 @@ +import React from 'react'; +import { useStore } from 'App/mstore'; +import { BackLink } from 'UI'; +import { observer } from 'mobx-react-lite'; +import { connect } from 'react-redux'; +import { fetchSessions, customSetSessions } from 'Duck/liveSearch'; +import { useHistory, useParams } from 'react-router-dom'; +import { liveSession, assist, withSiteId, multiview } from 'App/routes'; +import AssistSessionsModal from 'App/components/Session_/Player/Controls/AssistSessionsModal'; +import { useModal } from 'App/components/Modal'; +import LivePlayer from 'App/components/Session/LivePlayer'; +import EmptyTile from './EmptyTile' +import SessionTileFooter from './SessionTileFooter' + +function Multiview({ + total, + fetchSessions, + siteId, + assistCredendials, + customSetSessions, +}: { + total: number; + customSetSessions: (data: any) => void; + fetchSessions: (filter: any) => void; + siteId: string; + assistCredendials: any; + list: Record[]; +}) { + const { showModal, hideModal } = useModal(); + + const { assistMultiviewStore } = useStore(); + const history = useHistory(); + // @ts-ignore + const { sessionsquery } = useParams(); + + const onSessionsChange = (sessions: Record[]) => { + const sessionIdQuery = encodeURIComponent(sessions.map((s) => s.sessionId).join(',')); + return history.replace(withSiteId(multiview(sessionIdQuery), siteId)); + }; + + React.useEffect(() => { + assistMultiviewStore.setOnChange(onSessionsChange); + + if (sessionsquery) { + const sessionIds = decodeURIComponent(sessionsquery).split(','); + // preset + assistMultiviewStore.presetSessions(sessionIds).then((data) => { + customSetSessions(data); + }); + } else { + fetchSessions({}); + } + 
}, []); + + const openLiveSession = (e: React.MouseEvent, sessionId: string) => { + e.stopPropagation(); + assistMultiviewStore.setActiveSession(sessionId); + history.push(withSiteId(liveSession(sessionId)+'?multi=true', siteId)); + }; + + const returnToList = () => { + assistMultiviewStore.reset() + history.push(withSiteId(assist(), siteId)); + }; + + const openListModal = () => { + showModal(, { right: true }); + }; + + const replaceSession = (e: React.MouseEvent, sessionId: string) => { + e.stopPropagation(); + showModal(, { right: true }); + }; + + const deleteSession = (e: React.MouseEvent, sessionId: string) => { + e.stopPropagation(); + assistMultiviewStore.removeSession(sessionId); + }; + + const emptySpace = 4 - assistMultiviewStore.sessions.length; + + const placeholder = emptySpace > 0 ? new Array(emptySpace).fill(0) : [] + + return ( +
+
+
+ {/* @ts-ignore */} + +
+
{`Watching ${assistMultiviewStore.sessions.length} of ${total} Live Sessions`}
+
+
+ {assistMultiviewStore.sortedSessions.map((session: Record) => ( +
+
openLiveSession(e, session.sessionId)} className="w-full h-full"> + {session.agentToken ? ( + + ) : ( +
Loading session
+ )} +
+ +
+ ))} + {placeholder.map((_, i) => ( + + + + ))} +
+
+ ); +} + +export default connect( + (state: any) => ({ + total: state.getIn(['liveSearch', 'total']), + siteId: state.getIn(['site', 'siteId']), + }), + { + fetchSessions, + customSetSessions, + } +)(observer(Multiview)); diff --git a/frontend/app/components/Session_/Multiview/SessionTileFooter.tsx b/frontend/app/components/Session_/Multiview/SessionTileFooter.tsx new file mode 100644 index 000000000..da6c36ca5 --- /dev/null +++ b/frontend/app/components/Session_/Multiview/SessionTileFooter.tsx @@ -0,0 +1,36 @@ +import React from 'react' +import { Icon } from 'UI' + +function SessionTileFooter({ + userDisplayName, + sessionId, + replaceSession, + deleteSession, +}: { + userDisplayName: string; + sessionId: string; + replaceSession: (e: any, id: string) => void; + deleteSession: (e: any, id: string) => void; +}) { + return ( +
+
{userDisplayName}
+
+
replaceSession(e, sessionId)} + > + Replace Session +
+
deleteSession(e, sessionId)} + > + +
+
+
+ ); +} + +export default SessionTileFooter; diff --git a/frontend/app/components/Session_/Network/Network.js b/frontend/app/components/Session_/Network/Network.DEPRECATED.js similarity index 97% rename from frontend/app/components/Session_/Network/Network.js rename to frontend/app/components/Session_/Network/Network.DEPRECATED.js index f824cba09..ce09e94d2 100644 --- a/frontend/app/components/Session_/Network/Network.js +++ b/frontend/app/components/Session_/Network/Network.DEPRECATED.js @@ -1,7 +1,7 @@ import React from 'react'; import cn from 'classnames'; -import { connectPlayer, jump, pause } from 'Player'; -import { Tooltip, Button, TextEllipsis } from 'UI'; +import { connectPlayer, } from 'Player'; +import { Tooltip, TextEllipsis } from 'UI'; import { getRE } from 'App/utils'; import { TYPES } from 'Types/session/resource'; import stl from './network.module.css'; diff --git a/frontend/app/components/Session_/Network/NetworkContent.js b/frontend/app/components/Session_/Network/NetworkContent.js index 5f335dbf3..2891a0597 100644 --- a/frontend/app/components/Session_/Network/NetworkContent.js +++ b/frontend/app/components/Session_/Network/NetworkContent.js @@ -1,6 +1,5 @@ import React from 'react'; import cn from 'classnames'; -// import { connectPlayer } from 'Player'; import { QuestionMarkHint, Tooltip, Tabs, Input, NoContent, Icon, Toggler, Button } from 'UI'; import { getRE } from 'App/utils'; import { TYPES } from 'Types/session/resource'; @@ -12,7 +11,6 @@ import BottomBlock from '../BottomBlock'; import InfoLine from '../BottomBlock/InfoLine'; import stl from './network.module.css'; import { Duration } from 'luxon'; -import { jump } from 'Player'; const ALL = 'ALL'; const XHR = 'xhr'; @@ -112,8 +110,6 @@ function renderSize(r) { content = 'Not captured'; } else { const headerSize = r.headerSize || 0; - const encodedSize = r.encodedBodySize || 0; - const transferred = headerSize + encodedSize; const showTransferred = r.headerSize != null; triggerText = 
formatBytes(r.decodedBodySize); @@ -234,7 +230,6 @@ export default class NetworkContent extends React.PureComponent { className="input-small" placeholder="Filter by name" icon="search" - iconPosition="left" name="filter" onChange={this.onFilterChange} height={28} diff --git a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx index de3cbdf5f..ef8598ddc 100644 --- a/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx +++ b/frontend/app/components/Session_/OverviewPanel/OverviewPanel.tsx @@ -1,6 +1,5 @@ -import { connectPlayer } from 'Player'; -import { toggleBottomBlock } from 'Duck/components/player'; import React, { useEffect } from 'react'; +import { toggleBottomBlock } from 'Duck/components/player'; import BottomBlock from '../BottomBlock'; import EventRow from './components/EventRow'; import { TYPES } from 'Types/session/event'; @@ -10,40 +9,39 @@ import FeatureSelection, { HELP_MESSAGE } from './components/FeatureSelection/Fe import TimelinePointer from './components/TimelinePointer'; import VerticalPointerLine from './components/VerticalPointerLine'; import cn from 'classnames'; -// import VerticalLine from './components/VerticalLine'; import OverviewPanelContainer from './components/OverviewPanelContainer'; import { NoContent, Icon } from 'UI'; +import { observer } from 'mobx-react-lite'; +import { PlayerContext } from 'App/components/Session/playerContext'; -interface Props { - resourceList: any[]; - exceptionsList: any[]; - eventsList: any[]; - toggleBottomBlock: any; - stackEventList: any[]; - issuesList: any[]; - performanceChartData: any; - endTime: number; - fetchPresented?: boolean; -} -function OverviewPanel(props: Props) { - const { fetchPresented = false } = props; +function OverviewPanel({ issuesList }: { issuesList: Record[] }) { + const { store } = React.useContext(PlayerContext) const [dataLoaded, setDataLoaded] = React.useState(false); const 
[selectedFeatures, setSelectedFeatures] = React.useState([ 'PERFORMANCE', 'ERRORS', - // 'EVENTS', 'NETWORK', ]); - const resources: any = React.useMemo(() => { const { - resourceList, - exceptionsList, - eventsList, - stackEventList, - issuesList, + endTime, performanceChartData, - } = props; + stackList: stackEventList, + eventList: eventsList, + exceptionsList, + resourceList: resourceListUnmap, + fetchList, + graphqlList, + } = store.get() + + const fetchPresented = fetchList.length > 0; + + const resourceList = resourceListUnmap + .filter((r: any) => r.isRed() || r.isYellow()) + .concat(fetchList.filter((i: any) => parseInt(i.status) >= 400)) + .concat(graphqlList.filter((i: any) => parseInt(i.status) >= 400)) + + const resources: any = React.useMemo(() => { return { NETWORK: resourceList, ERRORS: exceptionsList, @@ -59,25 +57,25 @@ function OverviewPanel(props: Props) { } if ( - props.resourceList.length > 0 || - props.exceptionsList.length > 0 || - props.eventsList.length > 0 || - props.stackEventList.length > 0 || - props.issuesList.length > 0 || - props.performanceChartData.length > 0 + resourceList.length > 0 || + exceptionsList.length > 0 || + eventsList.length > 0 || + stackEventList.length > 0 || + issuesList.length > 0 || + performanceChartData.length > 0 ) { setDataLoaded(true); } }, [ - props.resourceList, - props.exceptionsList, - props.eventsList, - props.stackEventList, - props.performanceChartData, + resourceList, + issuesList, + exceptionsList, + eventsList, + stackEventList, + performanceChartData, ]); return ( - X-RAY @@ -86,9 +84,10 @@ function OverviewPanel(props: Props) {
- - + +
@@ -118,7 +117,7 @@ function OverviewPanel(props: Props) { fetchPresented={fetchPresented} /> )} - endTime={props.endTime} + endTime={endTime} message={HELP_MESSAGE[feature]} />
@@ -128,7 +127,6 @@ function OverviewPanel(props: Props) {
- ); } @@ -140,20 +138,5 @@ export default connect( toggleBottomBlock, } )( - connectPlayer((state: any) => ({ - fetchPresented: state.fetchList.length > 0, - resourceList: state.resourceList - .filter((r: any) => r.isRed() || r.isYellow()) - .concat(state.fetchList.filter((i: any) => parseInt(i.status) >= 400)) - .concat(state.graphqlList.filter((i: any) => parseInt(i.status) >= 400)), - exceptionsList: state.exceptionsList, - eventsList: state.eventList, - stackEventList: state.stackList, - performanceChartData: state.performanceChartData, - endTime: state.endTime, - }))(OverviewPanel) -); - -const Wrapper = React.memo((props: any) => { - return <>{props.children}; -}); + observer(OverviewPanel) +) diff --git a/frontend/app/components/Session_/OverviewPanel/components/OverviewPanelContainer/OverviewPanelContainer.tsx b/frontend/app/components/Session_/OverviewPanel/components/OverviewPanelContainer/OverviewPanelContainer.tsx index e017fe1db..8b802953d 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/OverviewPanelContainer/OverviewPanelContainer.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/OverviewPanelContainer/OverviewPanelContainer.tsx @@ -1,48 +1,33 @@ import React from 'react'; import VerticalLine from '../VerticalLine'; -import { connectPlayer, Controls } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; + interface Props { - children: React.ReactNode; - endTime: number; + children: React.ReactNode; + endTime: number; } const OverviewPanelContainer = React.memo((props: Props) => { - const { endTime } = props; - const [mouseX, setMouseX] = React.useState(0); - const [mouseIn, setMouseIn] = React.useState(false); - const onClickTrack = (e: any) => { - const p = e.nativeEvent.offsetX / e.target.offsetWidth; - const time = Math.max(Math.round(p * endTime), 0); - if (time) { - Controls.jump(time); - } - }; + const { player } = React.useContext(PlayerContext) - // const 
onMouseMoveCapture = (e: any) => { - // if (!mouseIn) { - // return; - // } - // const p = e.nativeEvent.offsetX / e.target.offsetWidth; - // setMouseX(p * 100); - // }; + const { endTime } = props; + const [mouseX, setMouseX] = React.useState(0); + const [mouseIn, setMouseIn] = React.useState(false); + const onClickTrack = (e: any) => { + const p = e.nativeEvent.offsetX / e.target.offsetWidth; + const time = Math.max(Math.round(p * endTime), 0); + if (time) { + player.jump(time); + } + }; - return ( -
setMouseIn(true)} - // onMouseOut={() => setMouseIn(false)} - > - {mouseIn && } -
{props.children}
-
- ); + return ( +
+ {mouseIn && } +
{props.children}
+
+ ); }); export default OverviewPanelContainer; - -// export default connectPlayer((state: any) => ({ -// endTime: state.endTime, -// }))(OverviewPanelContainer); diff --git a/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx b/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx index 8da42f303..2e719f377 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/PerformanceGraph/PerformanceGraph.tsx @@ -1,6 +1,5 @@ import React from 'react'; -import { connectPlayer } from 'Player'; -import { AreaChart, Area, Tooltip, ResponsiveContainer } from 'recharts'; +import { AreaChart, Area, ResponsiveContainer } from 'recharts'; interface Props { list: any; diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx index 5b6434794..206832329 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelinePointer/TimelinePointer.tsx @@ -1,5 +1,4 @@ import React from 'react'; -import { Controls } from 'Player'; import { NETWORK, EXCEPTIONS } from 'Duck/components/player'; import { useModal } from 'App/components/Modal'; import { Icon, Tooltip } from 'UI'; @@ -7,6 +6,7 @@ import StackEventModal from '../StackEventModal'; import ErrorDetailsModal from 'App/components/Dashboard/components/Errors/ErrorDetailsModal'; import FetchDetails from 'Shared/FetchDetailsModal'; import GraphQLDetailsModal from 'Shared/GraphQLDetailsModal'; +import { PlayerContext } from 'App/components/Session/playerContext'; interface Props { pointer: any; @@ -15,11 +15,13 @@ interface Props { fetchPresented?: boolean; } const TimelinePointer = 
React.memo((props: Props) => { + const { player } = React.useContext(PlayerContext) + const { showModal } = useModal(); const createEventClickHandler = (pointer: any, type: any) => (e: any) => { if (props.noClick) return; e.stopPropagation(); - Controls.jump(pointer.time); + player.jump(pointer.time); if (!type) { return; } diff --git a/frontend/app/components/Session_/OverviewPanel/components/TimelineScale/TimelineScale.tsx b/frontend/app/components/Session_/OverviewPanel/components/TimelineScale/TimelineScale.tsx index 3905f4538..f8a8ec2f1 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/TimelineScale/TimelineScale.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/TimelineScale/TimelineScale.tsx @@ -1,5 +1,4 @@ import React from 'react'; -import { connectPlayer } from 'Player'; import { millisToMinutesAndSeconds } from 'App/utils'; interface Props { @@ -17,9 +16,6 @@ function TimelineScale(props: Props) { for (var i = 0; i < part; i++) { const txt = millisToMinutesAndSeconds(i * (endTime / part)); const el = document.createElement('div'); - // el.style.height = '10px'; - // el.style.width = '1px'; - // el.style.backgroundColor = '#ccc'; el.style.position = 'absolute'; el.style.left = `${i * gap}px`; el.style.paddingTop = '1px'; @@ -38,23 +34,11 @@ function TimelineScale(props: Props) { } drawScale(scaleRef.current); - - // const resize = () => drawScale(scaleRef.current); - - // window.addEventListener('resize', resize); - // return () => { - // window.removeEventListener('resize', resize); - // }; }, [scaleRef]); return (
- {/*
*/}
); } export default TimelineScale; - -// export default connectPlayer((state: any) => ({ -// endTime: state.endTime, -// }))(TimelineScale); diff --git a/frontend/app/components/Session_/OverviewPanel/components/VerticalPointerLine/VerticalPointerLine.tsx b/frontend/app/components/Session_/OverviewPanel/components/VerticalPointerLine/VerticalPointerLine.tsx index 688e8364c..5f815efa6 100644 --- a/frontend/app/components/Session_/OverviewPanel/components/VerticalPointerLine/VerticalPointerLine.tsx +++ b/frontend/app/components/Session_/OverviewPanel/components/VerticalPointerLine/VerticalPointerLine.tsx @@ -1,18 +1,16 @@ import React from 'react'; -import { connectPlayer } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; import VerticalLine from '../VerticalLine'; -interface Props { - time?: number; - scale?: number; -} -function VerticalPointerLine(props: Props) { - const { time, scale } = props; +function VerticalPointerLine() { + const { store } = React.useContext(PlayerContext) + + const { time, endTime } = store.get(); + const scale = 100 / endTime; + const left = time * scale; return ; } -export default connectPlayer((state: any) => ({ - time: state.time, - scale: 100 / state.endTime, -}))(VerticalPointerLine); +export default observer(VerticalPointerLine); diff --git a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx index 03d74a247..e1ba5ccba 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/PageInsightsPanel.tsx @@ -3,7 +3,8 @@ import { Loader, Icon } from 'UI'; import { connect } from 'react-redux'; import { fetchInsights } from 'Duck/sessions'; import SelectorsList from './components/SelectorsList/SelectorsList'; -import { markTargets, Controls as Player } from 'Player'; +import { 
PlayerContext } from 'App/components/Session/playerContext'; + import Select from 'Shared/Select'; import SelectDateRange from 'Shared/SelectDateRange'; import Period from 'Types/app/period'; @@ -22,6 +23,9 @@ interface Props { } function PageInsightsPanel({ filters, fetchInsights, events = [], insights, urlOptions, host, loading = true, setActiveTab }: Props) { + const { player: Player } = React.useContext(PlayerContext) + const markTargets = (t: any) => Player.markTargets(t) + const [insightsFilters, setInsightsFilters] = useState(filters); const defaultValue = urlOptions && urlOptions[0] ? urlOptions[0].value : ''; diff --git a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx index 999dae866..39de770b7 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorCard/SelectorCard.tsx @@ -1,9 +1,9 @@ -import React, { useState } from 'react'; +import React from 'react'; import stl from './SelectorCard.module.css'; import cn from 'classnames'; import type { MarkedTarget } from 'Player'; -import { activeTarget } from 'Player'; import { Tooltip } from 'react-tippy'; +import { PlayerContext } from 'App/components/Session/playerContext'; interface Props { index?: number; @@ -12,7 +12,11 @@ interface Props { } export default function SelectorCard({ index = 1, target, showContent }: Props) { + const { player } = React.useContext(PlayerContext) + const activeTarget = player.setActiveTarget + return ( + // @ts-ignore TODO for Alex
activeTarget(index)}>
{/* @ts-ignore */} diff --git a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorsList/SelectorsList.tsx b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorsList/SelectorsList.tsx index 14080e718..183b9754e 100644 --- a/frontend/app/components/Session_/PageInsightsPanel/components/SelectorsList/SelectorsList.tsx +++ b/frontend/app/components/Session_/PageInsightsPanel/components/SelectorsList/SelectorsList.tsx @@ -1,26 +1,22 @@ import React from 'react'; import { NoContent } from 'UI'; -import { connectPlayer } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; import SelectorCard from '../SelectorCard/SelectorCard'; -import type { MarkedTarget } from 'Player'; import stl from './selectorList.module.css'; -interface Props { - targets: Array; - activeTargetIndex: number; -} +function SelectorsList() { + const { store } = React.useContext(PlayerContext) + + const { markedTargets: targets, activeTargetIndex } = store.get() -function SelectorsList({ targets, activeTargetIndex }: Props) { return (
- {targets && targets.map((target, index) => )} + {targets && targets.map((target, index) => )}
); } -export default connectPlayer((state: any) => ({ - targets: state.markedTargets, - activeTargetIndex: state.activeTargetIndex, -}))(SelectorsList); +export default observer(SelectorsList); diff --git a/frontend/app/components/Session_/Performance/Performance.tsx b/frontend/app/components/Session_/Performance/Performance.tsx index 994401141..eca011fcb 100644 --- a/frontend/app/components/Session_/Performance/Performance.tsx +++ b/frontend/app/components/Session_/Performance/Performance.tsx @@ -1,6 +1,7 @@ import React from 'react'; import { connect } from 'react-redux'; -import { Controls as PlayerControls, connectPlayer } from 'Player'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import { observer } from 'mobx-react-lite'; import { AreaChart, Area, @@ -11,10 +12,8 @@ import { Tooltip, ResponsiveContainer, ReferenceLine, - CartesianGrid, Label, } from 'recharts'; -import { Checkbox } from 'UI'; import { durationFromMsFormatted } from 'App/date'; import { formatBytes } from 'App/utils'; @@ -174,362 +173,324 @@ function addFpsMetadata(data) { }); } -@connect((state) => ({ - userDeviceHeapSize: state.getIn(['sessions', 'current', 'userDeviceHeapSize']), - userDeviceMemorySize: state.getIn(['sessions', 'current', 'userDeviceMemorySize']), -})) -export default class Performance extends React.PureComponent { - _timeTicks = generateTicks(this.props.performanceChartData); - _data = addFpsMetadata(this.props.performanceChartData); - // state = { - // totalHeap: false, - // usedHeap: true, - // fps: true, - // } - // onCheckboxClick = (e, { name, checked }) => this.setState({ [ name ]: checked }) +function Performance({ + userDeviceHeapSize, +}: { + userDeviceHeapSize: number; +}) { + const { player, store } = React.useContext(PlayerContext); + const [_timeTicks, setTicks] = React.useState([]) + const [_data, setData] = React.useState([]) - onDotClick = ({ index }) => { - const point = this._data[index]; + const { + performanceChartTime, + 
performanceChartData, + connType, + connBandwidth, + performanceAvaliability: avaliability, + } = store.get(); + + React.useState(() => { + setTicks(generateTicks(performanceChartData)); + setData(addFpsMetadata(performanceChartData)); + }) + + + const onDotClick = ({ index: pointer }: { index: number }) => { + const point = _data[pointer]; if (!!point) { - PlayerControls.jump(point.time); + player.jump(point.time); } }; - onChartClick = (e) => { + const onChartClick = (e: any) => { if (e === null) return; const { activeTooltipIndex } = e; - const point = this._data[activeTooltipIndex]; + const point = _data[activeTooltipIndex]; if (!!point) { - PlayerControls.jump(point.time); + player.jump(point.time); } }; - render() { - const { - userDeviceHeapSize, - userDeviceMemorySize, - connType, - connBandwidth, - performanceChartTime, - avaliability = {}, - } = this.props; - const { fps, cpu, heap, nodes } = avaliability; - const avaliableCount = [fps, cpu, heap, nodes].reduce((c, av) => (av ? c + 1 : c), 0); - const height = avaliableCount === 0 ? '0' : `${100 / avaliableCount}%`; + const { fps, cpu, heap, nodes } = avaliability; + const avaliableCount = [fps, cpu, heap, nodes].reduce((c, av) => (av ? c + 1 : c), 0); + const height = avaliableCount === 0 ? '0' : `${100 / avaliableCount}%`; - return ( - - -
-
Performance
- - - {/* */} - - = 1000 ? `${connBandwidth / 1000} Mbps` : `${connBandwidth} Kbps` - } - display={connBandwidth != null} - /> - -
-
- - {fps && ( - - + +
+
Performance
+ + + + = 1000 ? `${connBandwidth / 1000} Mbps` : `${connBandwidth} Kbps` + } + display={connBandwidth != null} + /> + +
+
+ + {fps && ( + + + + + + - - - - {/* */} - {/* */} - - - - {/* */} - - - - - {/* */} - - - - - )} - {cpu && ( - - + + + + + + + + + + + )} + {cpu && ( + + + + + + {/* */} + ''} + domain={[0, 'dataMax']} + ticks={_timeTicks} > - - - - {/* */} - ''} - domain={[0, 'dataMax']} - ticks={this._timeTicks} - > - - - - - - - - - )} +
+
+ )} - {heap && ( - - + + + + + ''} // tick={false} + _timeTicks to cartesian array + domain={[0, 'dataMax']} + ticks={_timeTicks} > - - - - {/* */} - ''} // tick={false} + this._timeTicks to cartesian array - domain={[0, 'dataMax']} - ticks={this._timeTicks} - > - - max * 1.2]} - /> - - - - - - - )} - {nodes && ( - - + + max * 1.2]} + /> + + + + + + + )} + {nodes && ( + + + + + + ''} + domain={[0, 'dataMax']} + ticks={_timeTicks} > - - - - {/* */} - ''} - domain={[0, 'dataMax']} - ticks={this._timeTicks} - > - - max * 1.2]} - /> - - - - - - )} -
-
- ); - } +