diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index a6a3998cb..f30c1b111 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -67,7 +67,10 @@ jobs: } && { echo "Skipping Security Checks" } - docker push $DOCKER_REPO/$image:$IMAGE_TAG + images=("chalice" "alerts") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done - name: Creating old image input run: | # @@ -107,7 +110,7 @@ jobs: cat /tmp/image_override.yaml # Deploy command - helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} # We're not passing -ee flag, because helm will add that. diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index e85775ed2..ec83c1c3c 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -66,7 +66,10 @@ jobs: } && { echo "Skipping Security Checks" } - docker push $DOCKER_REPO/$image:$IMAGE_TAG + images=("chalice" "alerts") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done - name: Creating old image input run: | # @@ -131,4 +134,4 @@ jobs: # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} # IMAGE_TAG: ${{ github.sha }} # ENVIRONMENT: staging - # + diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml index 92e130c84..afbc85043 100644 --- a/.github/workflows/utilities.yaml +++ b/.github/workflows/utilities.yaml @@ -43,7 +43,7 @@ jobs: PUSH_IMAGE=1 bash build.sh - name: Deploy to kubernetes run: | - cd scripts/helm/ + cd scripts/helmcharts/ sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml sed -i "s#minio_access_key.*#minio_access_key: 
\"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 3f32314f9..b75d22093 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt @@ -160,17 +158,16 @@ jobs: # Deploy command helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f - - #- name: Alert slack - # if: ${{ failure() }} - # uses: rtCamp/action-slack-notify@v2 - # env: - # SLACK_CHANNEL: ee - # SLACK_TITLE: "Failed ${{ github.workflow }}" - # SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' - # SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} - # SLACK_USERNAME: "OR Bot" - # SLACK_MESSAGE: 'Build failed :bomb:' - + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ee + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' # - name: Debug Job # if: ${{ failure() }} diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 21fc55c06..0d9927df9 100644 --- 
a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt diff --git a/api/.trivyignore b/api/.trivyignore new file mode 100644 index 000000000..02f167862 --- /dev/null +++ b/api/.trivyignore @@ -0,0 +1,3 @@ +# Accept the risk until +# python setup tools recently fixed. Not yet avaialable in distros. +CVE-2022-40897 exp:2023-02-01 diff --git a/api/Dockerfile b/api/Dockerfile index 036dcb0f2..dec3156cb 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg # Add Tini @@ -9,7 +12,8 @@ ENV SOURCE_MAP_VERSION=0.7.4 \ APP_NAME=chalice \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=false \ - ENTERPRISE_BUILD=${envarg} + ENTERPRISE_BUILD=${envarg} \ + GIT_SHA=$GIT_SHA WORKDIR /work COPY requirements.txt ./requirements.txt diff --git a/api/Dockerfile_alerts b/api/Dockerfile_alerts index 881b21fb9..117e28456 100644 --- a/api/Dockerfile_alerts +++ b/api/Dockerfile_alerts @@ -1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg ENV APP_NAME=alerts \ @@ -8,6 +11,7 @@ ENV APP_NAME=alerts \ PG_MAXCONN=10 \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=true \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} WORKDIR /work diff 
--git a/api/build.sh b/api/build.sh index 895f9bb8e..0e82088fe 100644 --- a/api/build.sh +++ b/api/build.sh @@ -16,7 +16,8 @@ exit_err() { } environment=$1 -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -41,14 +42,17 @@ function build_api(){ tag="ee-" } mv Dockerfile.dockerignore .dockerignore - docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/chalice:${git_sha1} . + docker build -f ./Dockerfile --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/chalice:${image_tag} . cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest + docker push ${DOCKER_REPO:-'local'}/chalice:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/chalice:${image_tag} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/chalice:${image_tag} + } echo "api docker build completed" } diff --git a/api/build_alerts.sh b/api/build_alerts.sh index a36472a8d..81c4cdb36 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -7,7 +7,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -31,12 +32,12 @@ function build_alerts(){ tag="ee-" } mv Dockerfile_alerts.dockerignore .dockerignore - docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/alerts:${git_sha1} . 
+ docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/alerts:${image_tag} . cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/alerts:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest + docker push ${DOCKER_REPO:-'local'}/alerts:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/alerts:${image_tag} ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest } echo "completed alerts build" diff --git a/api/chalicelib/utils/html/alert_notification.html b/api/chalicelib/utils/html/alert_notification.html index 2d63341f3..0f475dae1 100644 --- a/api/chalicelib/utils/html/alert_notification.html +++ b/api/chalicelib/utils/html/alert_notification.html @@ -1,60 +1,72 @@ - - - - + + + +
-
- - - - - - + +
-
- OpenReplay -
-
+ + + - - - -
+
+ + + + + - - - - - - - - + - -
+
+ OpenReplay +
+
-

- New alert!

-

- %(message)s

-

- See metrics for more details.

+ - -
-
-
-

- Sent with ♡ from OpenReplay © 2022 - All rights reserved.

- https://openreplay.com/ +

+

+ New alert!

+

+ %(message)s

+

+ See metrics for more details.

- -
+
+ + +
+
+
+
+

+ Sent with ♡ from OpenReplay © + - All rights + reserved.

+ https://openreplay.com/ +

+ +
+
+ + + + + - + \ No newline at end of file diff --git a/backend/Dockerfile b/backend/Dockerfile index ae6a5db18..043de51cd 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -19,10 +19,14 @@ RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic open FROM alpine AS entrypoint +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5 RUN adduser -u 1001 openreplay -D ENV TZ=UTC \ + GIT_SHA=$GIT_SHA \ FS_ULIMIT=1000 \ FS_DIR=/mnt/efs \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ diff --git a/backend/build.sh b/backend/build.sh index 073f540df..95a833139 100755 --- a/backend/build.sh +++ b/backend/build.sh @@ -9,7 +9,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh set -e -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} ee="false" check_prereq() { which docker || { @@ -22,9 +23,12 @@ check_prereq() { function build_service() { image="$1" echo "BUILDING $image" - docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image . + docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/amd64 --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha . 
[[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1} + docker push ${DOCKER_REPO:-'local'}/$image:${image_tag} + } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image:${image_tag} } echo "Build completed for $image" return @@ -51,7 +55,7 @@ function build_api(){ for image in $(ls cmd); do build_service $image - echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}" + echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}" done cd ../backend rm -rf ../${destination} diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 75a91d594..3b315561d 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -44,6 +44,8 @@ type Storage struct { readingDEVTime syncfloat64.Histogram archivingDOMTime syncfloat64.Histogram archivingDEVTime syncfloat64.Histogram + uploadingDOMTime syncfloat64.Histogram + uploadingDEVTime syncfloat64.Histogram tasks chan *Task ready chan struct{} @@ -85,6 +87,14 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor if err != nil { log.Printf("can't create archiving_duration metric: %s", err) } + uploadingDOMTime, err := metrics.RegisterHistogram("uploading_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } + uploadingDEVTime, err := metrics.RegisterHistogram("uploading_dt_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } newStorage := &Storage{ cfg: cfg, s3: s3, @@ -96,6 +106,8 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor readingDEVTime: readingDEVTime, archivingDOMTime: archivingDOMTime, archivingDEVTime: archivingDEVTime, + uploadingDOMTime: uploadingDOMTime, + uploadingDEVTime: uploadingDEVTime, tasks: make(chan *Task, 1), ready: make(chan struct{}), } @@ -119,13 +131,13 @@ func (s *Storage) Upload(msg 
*messages.SessionEnd) (err error) { wg.Add(2) go func() { if prepErr := s.prepareSession(filePath, DOM, newTask); prepErr != nil { - err = fmt.Errorf("prepareSession err: %s", prepErr) + err = fmt.Errorf("prepareSession DOM err: %s", prepErr) } wg.Done() }() go func() { if prepErr := s.prepareSession(filePath, DEV, newTask); prepErr != nil { - err = fmt.Errorf("prepareSession err: %s", prepErr) + err = fmt.Errorf("prepareSession DEV err: %s", prepErr) } wg.Done() }() @@ -237,33 +249,46 @@ func (s *Storage) compressSession(data []byte) *bytes.Buffer { func (s *Storage) uploadSession(task *Task) { wg := &sync.WaitGroup{} wg.Add(3) + var ( + uploadDoms int64 = 0 + uploadDome int64 = 0 + uploadDev int64 = 0 + ) go func() { if task.doms != nil { + start := time.Now() if err := s.s3.Upload(task.doms, task.id+string(DOM)+"s", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. %s", err) } + uploadDoms = time.Now().Sub(start).Milliseconds() } wg.Done() }() go func() { if task.dome != nil { + start := time.Now() if err := s.s3.Upload(task.dome, task.id+string(DOM)+"e", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. %s", err) } + uploadDome = time.Now().Sub(start).Milliseconds() } wg.Done() }() go func() { if task.dev != nil { + start := time.Now() if err := s.s3.Upload(task.dev, task.id+string(DEV), "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. 
%s", err) } + uploadDev = time.Now().Sub(start).Milliseconds() } wg.Done() }() wg.Wait() // Record metrics ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + s.uploadingDOMTime.Record(ctx, float64(uploadDoms+uploadDome)) + s.uploadingDEVTime.Record(ctx, float64(uploadDev)) s.totalSessions.Add(ctx, 1) } diff --git a/backend/pkg/db/postgres/bulk.go b/backend/pkg/db/postgres/bulk.go index 16f59efcd..7b9bf90c8 100644 --- a/backend/pkg/db/postgres/bulk.go +++ b/backend/pkg/db/postgres/bulk.go @@ -2,8 +2,14 @@ package postgres import ( "bytes" + "context" "errors" "fmt" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric/instrument/syncfloat64" + "log" + "openreplay/backend/pkg/monitoring" + "time" ) const ( @@ -18,13 +24,15 @@ type Bulk interface { } type bulkImpl struct { - conn Pool - table string - columns string - template string - setSize int - sizeLimit int - values []interface{} + conn Pool + table string + columns string + template string + setSize int + sizeLimit int + values []interface{} + bulkSize syncfloat64.Histogram + bulkDuration syncfloat64.Histogram } func (b *bulkImpl) Append(args ...interface{}) error { @@ -46,6 +54,8 @@ func (b *bulkImpl) Send() error { } func (b *bulkImpl) send() error { + start := time.Now() + size := len(b.values) / b.setSize request := bytes.NewBufferString(insertPrefix + b.table + b.columns + insertValues) args := make([]interface{}, b.setSize) for i := 0; i < len(b.values)/b.setSize; i++ { @@ -63,13 +73,19 @@ func (b *bulkImpl) send() error { if err != nil { return fmt.Errorf("send bulk err: %s", err) } + // Save bulk metrics + ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + b.bulkDuration.Record(ctx, float64(time.Now().Sub(start).Milliseconds()), attribute.String("table", b.table)) + b.bulkSize.Record(ctx, float64(size), attribute.String("table", b.table)) return nil } -func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) 
(Bulk, error) { +func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { switch { case conn == nil: return nil, errors.New("db conn is empty") + case metrics == nil: + return nil, errors.New("metrics is empty") case table == "": return nil, errors.New("table is empty") case columns == "": @@ -81,13 +97,23 @@ func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) case sizeLimit <= 0: return nil, errors.New("size limit is wrong") } + messagesInBulk, err := metrics.RegisterHistogram("messages_in_bulk") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } + bulkInsertDuration, err := metrics.RegisterHistogram("bulk_insert_duration") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } return &bulkImpl{ - conn: conn, - table: table, - columns: columns, - template: template, - setSize: setSize, - sizeLimit: sizeLimit, - values: make([]interface{}, 0, setSize*sizeLimit), + conn: conn, + table: table, + columns: columns, + template: template, + setSize: setSize, + sizeLimit: sizeLimit, + values: make([]interface{}, 0, setSize*sizeLimit), + bulkSize: messagesInBulk, + bulkDuration: bulkInsertDuration, }, nil } diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 51773c1e2..73859fd39 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -78,7 +78,7 @@ func NewConn(url string, queueLimit, sizeLimit int, metrics *monitoring.Metrics) if err != nil { log.Fatalf("can't create new pool wrapper: %s", err) } - conn.initBulks() + conn.initBulks(metrics) return conn } @@ -107,9 +107,9 @@ func (conn *Conn) initMetrics(metrics *monitoring.Metrics) { } } -func (conn *Conn) initBulks() { +func (conn *Conn) initBulks(metrics *monitoring.Metrics) { var err error - conn.autocompletes, err = NewBulk(conn.c, + conn.autocompletes, err = NewBulk(conn.c, metrics, 
"autocomplete", "(value, type, project_id)", "($%d, $%d, $%d)", @@ -117,7 +117,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create autocomplete bulk: %s", err) } - conn.requests, err = NewBulk(conn.c, + conn.requests, err = NewBulk(conn.c, metrics, "events_common.requests", "(session_id, timestamp, seq_index, url, duration, success)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -125,7 +125,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create requests bulk: %s", err) } - conn.customEvents, err = NewBulk(conn.c, + conn.customEvents, err = NewBulk(conn.c, metrics, "events_common.customs", "(session_id, timestamp, seq_index, name, payload)", "($%d, $%d, $%d, left($%d, 2700), $%d)", @@ -133,7 +133,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create customEvents bulk: %s", err) } - conn.webPageEvents, err = NewBulk(conn.c, + conn.webPageEvents, err = NewBulk(conn.c, metrics, "events.pages", "(session_id, message_id, timestamp, referrer, base_referrer, host, path, query, dom_content_loaded_time, "+ "load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, "+ @@ -144,7 +144,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webInputEvents, err = NewBulk(conn.c, + conn.webInputEvents, err = NewBulk(conn.c, metrics, "events.inputs", "(session_id, message_id, timestamp, value, label)", "($%d, $%d, $%d, $%d, NULLIF($%d,''))", @@ -152,7 +152,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webGraphQL, err = NewBulk(conn.c, + conn.webGraphQL, err = NewBulk(conn.c, metrics, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -160,7 +160,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: 
%s", err) } - conn.webErrors, err = NewBulk(conn.c, + conn.webErrors, err = NewBulk(conn.c, metrics, "errors", "(error_id, project_id, source, name, message, payload)", "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)", @@ -168,7 +168,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webErrors bulk: %s", err) } - conn.webErrorEvents, err = NewBulk(conn.c, + conn.webErrorEvents, err = NewBulk(conn.c, metrics, "events.errors", "(session_id, message_id, timestamp, error_id)", "($%d, $%d, $%d, $%d)", @@ -176,7 +176,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webErrorTags, err = NewBulk(conn.c, + conn.webErrorTags, err = NewBulk(conn.c, metrics, "public.errors_tags", "(session_id, message_id, error_id, key, value)", "($%d, $%d, $%d, $%d, $%d)", @@ -184,7 +184,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webIssues, err = NewBulk(conn.c, + conn.webIssues, err = NewBulk(conn.c, metrics, "issues", "(project_id, issue_id, type, context_string)", "($%d, $%d, $%d, $%d)", @@ -192,7 +192,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webIssues bulk: %s", err) } - conn.webIssueEvents, err = NewBulk(conn.c, + conn.webIssueEvents, err = NewBulk(conn.c, metrics, "events_common.issues", "(session_id, issue_id, timestamp, seq_index, payload)", "($%d, $%d, $%d, $%d, CAST($%d AS jsonb))", @@ -200,7 +200,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webIssueEvents bulk: %s", err) } - conn.webCustomEvents, err = NewBulk(conn.c, + conn.webCustomEvents, err = NewBulk(conn.c, metrics, "events_common.customs", "(session_id, seq_index, timestamp, name, payload, level)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -208,7 +208,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webCustomEvents bulk: %s", err) } - conn.webClickEvents, err = 
NewBulk(conn.c, + conn.webClickEvents, err = NewBulk(conn.c, metrics, "events.clicks", "(session_id, message_id, timestamp, label, selector, url, path)", "($%d, $%d, $%d, NULLIF($%d, ''), $%d, $%d, $%d)", @@ -216,7 +216,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webClickEvents bulk: %s", err) } - conn.webNetworkRequest, err = NewBulk(conn.c, + conn.webNetworkRequest, err = NewBulk(conn.c, metrics, "events_common.requests", "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d, $%d, $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d)", diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go index d3cd807c6..803fba127 100644 --- a/backend/pkg/monitoring/metrics.go +++ b/backend/pkg/monitoring/metrics.go @@ -38,7 +38,7 @@ func New(name string) *Metrics { // initPrometheusDataExporter allows to use collected metrics in prometheus func (m *Metrics) initPrometheusDataExporter() { config := prometheus.Config{ - DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50}, + DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50, 100, 250, 500, 1000}, } c := controller.New( processor.NewFactory( @@ -76,8 +76,8 @@ Counter is a synchronous instrument that measures additive non-decreasing values */ func (m *Metrics) RegisterCounter(name string) (syncfloat64.Counter, error) { - if _, ok := m.counters[name]; ok { - return nil, fmt.Errorf("counter %s already exists", name) + if counter, ok := m.counters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().Counter(name) if err != nil { @@ -100,8 +100,8 @@ for example, the number of: */ func (m *Metrics) RegisterUpDownCounter(name string) (syncfloat64.UpDownCounter, error) { - if _, ok := m.upDownCounters[name]; ok { - return nil, fmt.Errorf("upDownCounter %s already exists", name) + if counter, ok := 
m.upDownCounters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().UpDownCounter(name) if err != nil { @@ -122,8 +122,8 @@ Histogram is a synchronous instrument that produces a histogram from recorded va */ func (m *Metrics) RegisterHistogram(name string) (syncfloat64.Histogram, error) { - if _, ok := m.histograms[name]; ok { - return nil, fmt.Errorf("histogram %s already exists", name) + if hist, ok := m.histograms[name]; ok { + return hist, nil } hist, err := m.meter.SyncFloat64().Histogram(name) if err != nil { diff --git a/ee/api/build_crons.sh b/ee/api/build_crons.sh index 2f9d84a81..4d246265a 100644 --- a/ee/api/build_crons.sh +++ b/ee/api/build_crons.sh @@ -36,6 +36,9 @@ function build_crons(){ docker tag ${DOCKER_REPO:-'local'}/crons:${git_sha1} ${DOCKER_REPO:-'local'}/crons:${tag}latest docker push ${DOCKER_REPO:-'local'}/crons:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/crons:${image_tag} + } echo "completed crons build" } diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 5e6c9b3b0..ccd4655ee 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -14,9 +14,13 @@ COPY nginx.conf /etc/nginx/conf.d/default.conf # Default step in docker build FROM nginx:alpine LABEL maintainer=Rajesh +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA COPY --from=builder /work/public /var/www/openreplay COPY nginx.conf /etc/nginx/conf.d/default.conf +ENV GIT_SHA=$GIT_SHA + EXPOSE 8080 RUN chown -R nginx:nginx /var/cache/nginx && \ chown -R nginx:nginx /var/log/nginx && \ diff --git a/frontend/app/Router.js b/frontend/app/Router.js index 955cfa0b3..584fb59e5 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -89,7 +89,7 @@ const MULTIVIEW_INDEX_PATH = routes.multiviewIndex(); @connect( (state) => { const siteId = state.getIn(['site', 'siteId']); - const jwt = state.get('jwt'); + const jwt = state.getIn(['user', 'jwt']); const changePassword = state.getIn(['user', 'account', 
'changePassword']); const userInfoLoading = state.getIn(['user', 'fetchUserInfoRequest', 'loading']); return { diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index 80922efe2..a9e092486 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -58,7 +58,7 @@ export const clean = (obj, forbidenValues = [ undefined, '' ]) => { export default class APIClient { constructor() { - const jwt = store.getState().get('jwt'); + const jwt = store.getState().getIn(['user', 'jwt']); const siteId = store.getState().getIn([ 'site', 'siteId' ]); this.init = { headers: { diff --git a/frontend/app/api_middleware.js b/frontend/app/api_middleware.js index f01f107ec..2b4fd4b4a 100644 --- a/frontend/app/api_middleware.js +++ b/frontend/app/api_middleware.js @@ -1,8 +1,8 @@ import logger from 'App/logger'; import APIClient from './api_client'; -import { UPDATE, DELETE } from './duck/jwt'; +import { UPDATE_JWT } from './duck/user'; -export default (store) => (next) => (action) => { +export default () => (next) => (action) => { const { types, call, ...rest } = action; if (!call) { return next(action); @@ -14,7 +14,7 @@ export default (store) => (next) => (action) => { return call(client) .then(async (response) => { if (response.status === 403) { - next({ type: DELETE }); + next({ type: UPDATE_JWT, data: null }); } if (!response.ok) { const text = await response.text(); @@ -30,7 +30,7 @@ export default (store) => (next) => (action) => { next({ type: SUCCESS, data, ...rest }); } if (jwt) { - next({ type: UPDATE, data: jwt }); + next({ type: UPDATE_JWT, data: jwt }); } }) .catch((e) => { diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index c04e83f12..70ee0f74e 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -8,7 +8,7 @@ import ReCAPTCHA from 'react-google-recaptcha'; import { withRouter } from 'react-router-dom'; import stl from './login.module.css'; import 
cn from 'classnames'; -import { setJwt } from 'Duck/jwt'; +import { setJwt } from 'Duck/user'; const FORGOT_PASSWORD = forgotPassword(); const SIGNUP_ROUTE = signup(); diff --git a/frontend/app/components/Session/WebPlayer.tsx b/frontend/app/components/Session/WebPlayer.tsx index 71e70bd62..f6f3cfb71 100644 --- a/frontend/app/components/Session/WebPlayer.tsx +++ b/frontend/app/components/Session/WebPlayer.tsx @@ -150,6 +150,7 @@ export default connect( session: state.getIn(['sessions', 'current']), insights: state.getIn(['sessions', 'insights']), visitedEvents: state.getIn(['sessions', 'visitedEvents']), + jwt: state.getIn(['user', 'jwt']), fullscreen: state.getIn(['components', 'player', 'fullscreen']), showEvents: state.get('showEvents'), members: state.getIn(['members', 'list']), diff --git a/frontend/app/duck/index.ts b/frontend/app/duck/index.ts index a5ea4fcd6..4e8e24d8f 100644 --- a/frontend/app/duck/index.ts +++ b/frontend/app/duck/index.ts @@ -1,7 +1,6 @@ // @ts-ignore import { combineReducers } from 'redux-immutable'; -import jwt from './jwt'; import user from './user'; import sessions from './sessions'; import assignments from './assignments'; @@ -26,7 +25,6 @@ import search from './search'; import liveSearch from './liveSearch'; const rootReducer = combineReducers({ - jwt, user, sessions, assignments, diff --git a/frontend/app/duck/jwt.js b/frontend/app/duck/jwt.js deleted file mode 100644 index 4d4147f34..000000000 --- a/frontend/app/duck/jwt.js +++ /dev/null @@ -1,19 +0,0 @@ -export const UPDATE = 'jwt/UPDATE'; -export const DELETE = 'jwt/DELETE'; - -export default (state = null, action = {}) => { - switch (action.type) { - case UPDATE: - return action.data; - case DELETE: - return null; - } - return state; -}; - -export function setJwt(data) { - return { - type: UPDATE, - data, - }; -} diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 072b7aeff..36c7dd778 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ 
-2,7 +2,6 @@ import { List, Map } from 'immutable'; import Client from 'Types/client'; import { deleteCookie } from 'App/utils'; import Account from 'Types/account'; -import { DELETE } from './jwt'; import withRequestState, { RequestTypes } from './requestStateCreator'; export const LOGIN = new RequestTypes('user/LOGIN'); @@ -20,7 +19,7 @@ const PUT_CLIENT = new RequestTypes('user/PUT_CLIENT'); const PUSH_NEW_SITE = 'user/PUSH_NEW_SITE'; const SET_ONBOARDING = 'user/SET_ONBOARDING'; -const initialState = Map({ +export const initialState = Map({ account: Account(), siteId: null, passwordRequestError: false, @@ -28,7 +27,8 @@ const initialState = Map({ tenants: [], authDetails: {}, onboarding: false, - sites: List() + sites: List(), + jwt: null }); const setClient = (state, data) => { @@ -36,8 +36,20 @@ const setClient = (state, data) => { return state.set('client', client) } +export const UPDATE_JWT = 'jwt/UPDATE'; +export const DELETE = 'jwt/DELETE' +export function setJwt(data) { + return { + type: UPDATE_JWT, + data, + }; +} + + const reducer = (state = initialState, action = {}) => { switch (action.type) { + case UPDATE_JWT: + return state.set('jwt', action.data); case RESET_PASSWORD.SUCCESS: case UPDATE_PASSWORD.SUCCESS: case LOGIN.SUCCESS: @@ -54,7 +66,10 @@ const reducer = (state = initialState, action = {}) => { // return state.set('tenants', action.data.map(i => ({ text: i.name, value: i.tenantId}))); case UPDATE_PASSWORD.FAILURE: return state.set('passwordErrors', List(action.errors)) + case FETCH_ACCOUNT.FAILURE: + case LOGIN.FAILURE: case DELETE: + console.log('hi') deleteCookie('jwt', '/', '.openreplay.com') return initialState; case PUT_CLIENT.REQUEST: @@ -114,12 +129,10 @@ export function fetchTenants() { } } -export const fetchUserInfo = () => dispatch => Promise.all([ - dispatch({ +export const fetchUserInfo = () => ({ types: FETCH_ACCOUNT.toArray(), call: client => client.get('/account'), - }), -]); + }); export function logout() { return { diff 
--git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 46b694329..a53c086f1 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -123,7 +123,10 @@ export default class MessageManager { private readonly screen: Screen, initialLists?: Partial ) { - this.pagesManager = new PagesManager(screen, this.session.isMobile, this) + this.pagesManager = new PagesManager(screen, this.session.isMobile, cssLoading => { + screen.displayFrame(!cssLoading) + state.update({ cssLoading }) + }) this.mouseMoveManager = new MouseMoveManager(screen) this.sessionStart = this.session.startedAt @@ -483,11 +486,6 @@ export default class MessageManager { this.state.update({ messagesLoading }); } - setCSSLoading(cssLoading: boolean) { - this.screen.displayFrame(!cssLoading); - this.state.update({ cssLoading }); - } - private setSize({ height, width }: { height: number, width: number }) { this.screen.scale({ height, width }); this.state.update({ width, height }); diff --git a/frontend/app/player/web/WebLivePlayer.ts b/frontend/app/player/web/WebLivePlayer.ts index 87e88225a..81f1c2178 100644 --- a/frontend/app/player/web/WebLivePlayer.ts +++ b/frontend/app/player/web/WebLivePlayer.ts @@ -28,7 +28,7 @@ export default class WebLivePlayer extends WebPlayer { this.assistManager = new AssistManager( session, - f => this.messageManager.setCSSLoading(f), + f => this.messageManager.setMessagesLoading(f), (msg, idx) => { this.incomingMessages.push(msg) if (!this.historyFileIsLoading) { diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index d0005352c..e47a803fd 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -56,10 +56,10 @@ export default class DOMManager extends ListWalker { private readonly screen: Screen, private readonly isMobile: boolean, public readonly time: 
number, - mm: MessageManager, + setCssLoading: ConstructorParameters[1], ) { super() - this.stylesManager = new StylesManager(screen, mm) + this.stylesManager = new StylesManager(screen, setCssLoading) } append(m: Message): void { diff --git a/frontend/app/player/web/managers/DOM/StylesManager.ts b/frontend/app/player/web/managers/DOM/StylesManager.ts index 5cffa9be9..c38ea0281 100644 --- a/frontend/app/player/web/managers/DOM/StylesManager.ts +++ b/frontend/app/player/web/managers/DOM/StylesManager.ts @@ -24,7 +24,7 @@ export default class StylesManager { private linkLoadPromises: Array> = []; private skipCSSLinks: Array = []; // should be common for all pages - constructor(private readonly screen: Screen, private readonly mm: MessageManager) {} + constructor(private readonly screen: Screen, private readonly setLoading: (flag: boolean) => void) {} reset():void { this.linkLoadingCount = 0; @@ -38,7 +38,7 @@ export default class StylesManager { const promise = new Promise((resolve) => { if (this.skipCSSLinks.includes(value)) resolve(); this.linkLoadingCount++; - this.mm.setCSSLoading(true); + this.setLoading(true); const addSkipAndResolve = () => { this.skipCSSLinks.push(value); // watch out resolve() @@ -57,7 +57,7 @@ export default class StylesManager { clearTimeout(timeoutId); this.linkLoadingCount--; if (this.linkLoadingCount === 0) { - this.mm.setCSSLoading(false); + this.setLoading(false); } }); this.linkLoadPromises.push(promise); diff --git a/frontend/app/player/web/managers/PagesManager.ts b/frontend/app/player/web/managers/PagesManager.ts index e4248f2ab..35d47a670 100644 --- a/frontend/app/player/web/managers/PagesManager.ts +++ b/frontend/app/player/web/managers/PagesManager.ts @@ -10,14 +10,18 @@ import DOMManager from './DOM/DOMManager'; export default class PagesManager extends ListWalker { private currentPage: DOMManager | null = null - constructor(private screen: Screen, private isMobile: boolean, private mm: MessageManager) { super() } + 
constructor( + private screen: Screen, + private isMobile: boolean, + private setCssLoading: ConstructorParameters[3], +) { super() } /* Assumed that messages added in a correct time sequence. */ appendMessage(m: Message): void { if (m.tp === MType.CreateDocument) { - super.append(new DOMManager(this.screen, this.isMobile, m.time, this.mm)) + super.append(new DOMManager(this.screen, this.isMobile, m.time, this.setCssLoading)) } if (this.last === null) { // Log wrong diff --git a/frontend/app/store.js b/frontend/app/store.js index dd1434a0c..a2379496c 100644 --- a/frontend/app/store.js +++ b/frontend/app/store.js @@ -4,26 +4,32 @@ import { Map } from 'immutable'; import indexReducer from './duck'; import apiMiddleware from './api_middleware'; import LocalStorage from './local_storage'; +import { initialState as initUserState, UPDATE_JWT } from './duck/user' const storage = new LocalStorage({ - jwt: String, + user: Object, }); const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ && window.env.NODE_ENV === "development" ? 
window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ : compose; const storageState = storage.state(); -const initialState = Map({ - jwt: storageState.jwt, - // TODO: store user -}); +const initialState = Map({ user: initUserState.update('jwt', () => storageState.user?.jwt || null) }); const store = createStore(indexReducer, initialState, composeEnhancers(applyMiddleware(thunk, apiMiddleware))); store.subscribe(() => { const state = store.getState(); + storage.sync({ - jwt: state.get('jwt') + user: state.get('user') }); }); +window.getJWT = () => { + console.log(JSON.stringify(storage.state().user?.jwt || 'not logged in')); +} +window.setJWT = (jwt) => { + store.dispatch({ type: UPDATE_JWT, data: jwt }) +} + export default store; diff --git a/frontend/build.sh b/frontend/build.sh index f57d98af2..55d295746 100644 --- a/frontend/build.sh +++ b/frontend/build.sh @@ -8,7 +8,8 @@ # Example # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} ee="false" check_prereq() { which docker || { @@ -21,11 +22,14 @@ check_prereq() { export DOCKER_BUILDKIT=1 function build(){ # Run docker as the same user, else we'll run in to permission issues. - docker build -t ${DOCKER_REPO:-'local'}/frontend:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image . + docker build -t ${DOCKER_REPO:-'local'}/frontend:${image_tag} --platform linux/amd64 --build-arg GIT_SHA=$git_sha .
[[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/frontend:${git_sha1} + docker push ${DOCKER_REPO:-'local'}/frontend:${image_tag} } - echo "frotend build completed" + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/frontend:${image_tag} + } + echo "frontend build completed" } check_prereq diff --git a/peers/Dockerfile b/peers/Dockerfile index bfabf6bea..2c8254482 100644 --- a/peers/Dockerfile +++ b/peers/Dockerfile @@ -1,8 +1,11 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini ARG envarg ENV PRIVATE_ENDPOINTS=false \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} WORKDIR /work diff --git a/peers/build.sh b/peers/build.sh index e84b942ec..45cc97892 100644 --- a/peers/build.sh +++ b/peers/build.sh @@ -6,7 +6,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} check_prereq() { which docker || { echo "Docker not installed, please install docker." @@ -26,14 +27,17 @@ function build_api(){ [[ $1 == "ee" ]] && { cp -rf ../ee/peers/* ./ } - docker build -f ./Dockerfile -t ${DOCKER_REPO:-'local'}/peers:${git_sha1} . + docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/peers:${image_tag} .
cd ../peers rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/peers:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/peers:${git_sha1} ${DOCKER_REPO:-'local'}/peers:latest + docker push ${DOCKER_REPO:-'local'}/peers:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/peers:${image_tag} ${DOCKER_REPO:-'local'}/peers:latest docker push ${DOCKER_REPO:-'local'}/peers:latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/peers:${image_tag} + } echo "peer docker build complted" } diff --git a/scripts/helmcharts/build_deploy.sh b/scripts/helmcharts/build_deploy.sh index e5714a1b2..cc5acd020 100644 --- a/scripts/helmcharts/build_deploy.sh +++ b/scripts/helmcharts/build_deploy.sh @@ -8,6 +8,12 @@ set -e # Removing local alpine:latest image docker rmi alpine || true +# Signing image +# cosign sign --key awskms:///alias/openreplay-container-sign image_url:tag +export SIGN_IMAGE=1 +export PUSH_IMAGE=1 +export SIGN_KEY="awskms:///alias/openreplay-container-sign" + echo $DOCKER_REPO [[ -z DOCKER_REPO ]] && { echo Set DOCKER_REPO="your docker registry" @@ -15,15 +21,15 @@ echo $DOCKER_REPO } || { docker login $DOCKER_REPO cd ../../backend - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../utilities - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../peers - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../frontend - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../sourcemap-reader - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../api - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ } diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index d4b36a663..903f9d312 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -7,12 +7,7 @@ set -eE -o pipefail # same as: `set -o errexit -o errtrace` trap err EXIT err() { - case "$?" 
in - 0) - ;; - *) - ;; - esac + exit $1 } # make all stderr red @@ -47,7 +42,7 @@ cat <<"EOF" EOF - echo -e "${green}Usage: openreplay-cli [ -h | --help ] + echo -e "${green}Usage: [DEBUG=1|SKIP_MIGRATION=1] openreplay-cli [ -h | --help ] [ -d | --status ] [ -v | --verbose ] [ -l | --logs SERVICE ] @@ -96,11 +91,10 @@ restart() { helmInstall() { - [[ FORCE_UPGRADE_FRONTENT -eq 1 ]] && { - helm upgrade --install openreplay -n app openreplay -f vars.yaml --set forceUpgradeFrontend=true - } || { - helm upgrade --install openreplay -n app openreplay -f vars.yaml - } + # Adding variables + [[ $SKIP_MIGRATION -eq 1 ]] && ARGS="--set skipMigration=true" + [[ $DEBUG -eq 1 ]] && ARGS="$ARGS --debug" + helm upgrade --install openreplay -n app openreplay -f vars.yaml $ARGS } upgrade() { @@ -145,4 +139,4 @@ do esac done -[[ $VERBOSE -eq 1 ]] && set -x +[[ $VERBOSE -eq 1 ]] && set -x || true diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index 33cc3e178..67cf00405 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -1,73 +1,6 @@ -# Default values for openreplay. -# This is a YAML-formatted file. -# Declare variables to be passed into your templates. - -replicaCount: 1 - -image: - repository: nginx - pullPolicy: IfNotPresent - # Overrides the image tag whose default is the chart appVersion. - tag: "" - -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" - -serviceAccount: - # Specifies whether a service account should be created - create: true - # Annotations to add to the service account - annotations: {} - # The name of the service account to use.
- # If not set and create is true, a name is generated using the fullname template - name: "" - -podAnnotations: {} - -securityContext: - runAsUser: 1001 - runAsGroup: 1001 -podSecurityContext: - runAsUser: 1001 - runAsGroup: 1001 - fsGroup: 1001 - fsGroupChangePolicy: "OnRootMismatch" -# podSecurityContext: {} - # fsGroup: 2000 - -# securityContext: {} - # capabilities: - # drop: - # - ALL - # readOnlyRootFilesystem: true - # runAsNonRoot: true - # runAsUser: 1000 - -resources: {} - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. - # limits: - # cpu: 100m - # memory: 128Mi - # requests: - # cpu: 100m - # memory: 128Mi - -autoscaling: - enabled: false - minReplicas: 1 - maxReplicas: 100 - targetCPUUtilizationPercentage: 80 - # targetMemoryUtilizationPercentage: 80 - -nodeSelector: {} - -tolerations: [] - -affinity: {} +redis: &redis + tls: + enabled: false ingress-nginx: enabled: true @@ -103,4 +36,5 @@ vault: &vault global: vault: *vault + redis: *redis clusterDomain: "svc.cluster.local" diff --git a/scripts/helmcharts/toolings/templates/kyverno.yaml b/scripts/helmcharts/toolings/templates/kyverno.yaml index 9db721726..ee3a9691c 100644 --- a/scripts/helmcharts/toolings/templates/kyverno.yaml +++ b/scripts/helmcharts/toolings/templates/kyverno.yaml @@ -24,6 +24,16 @@ spec: - keys: publicKeys: |- -----BEGIN PUBLIC KEY----- - MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuSUrc90YHUpXwB2E7Hu080K6z+Yc - esqGVAEESg9lEjQUaxOUqRkW3nI/vXRQayLEfBs6ugPNqCH+DbuarI9Jkg== + MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAoLidzRiNIO3l/sWCYw2f + Ct71YSj7UVerhbR81TNEKYtW0fUqg4GagS+esprcXteHPoBI+ZcfL2xJIs0ZNHZs + A+2VXYrsFRgREtABFCwJ2G51ybusoS3jpBsAmSNjG0uzseDxQMTh0arNOlNbhbmI + 
Tj1ty2JfyLejDKlxavXheKmJGb+7IdDCMmP3f5mXSsJpsOM8SJo49BkvKhTwzjc0 + 01dsSLo5mk9jeG2C6UvPCQeMIUKaf5GlYWyFx7vLZ+z5be9TPuWDH4GO0RtxJVXt + tqmk32aKe+0KDLH0ak9WRVz3ugYEjs+tqdO3y3ALLoGAAI+yGxGSfWFDnDj5AXpA + 2/XYSJAWRzPu35/H3laSrxaApYWN5an69jI30JY7SoEy/k+10oIGe2FGIihXTdq+ + As3IKPEtvuN9s3RTm2ujV/7rEnVVKWiHvQCwH8rxhsbDTeJCoNs8hSBUq1Muttct + EWML8s/TCIK01PyvH6VNQSnc+lRKAJOd5NpZ/SVMXBbrykCQSZPE8RcaQum3nMxE + Tri24VcWfRHj1WwUYzxpmoVE5F1lw0lqQIXlwz+AFhCLGsePSkjFShFtNFQuX22r + Q73JTt3FX4JEzaaKC5BZwXmkEs3MVpQj43HuEqDyejlsPWwRBYwZIzXpoBhOCFHD + t4PI8n+1dSE+uavu/ijgXl8CAwEAAQ== -----END PUBLIC KEY----- diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index c77881963..05b3c7231 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -110,7 +110,7 @@ global: assistJWTSecret: "SetARandomStringHere" s3: region: "us-east-1" - endpoint: "http://minio.db.svc.cluster.local:9000" + endpoint: "http://minio.openreplay.svc.cluster.local:9000" assetsBucket: "sessions-assets" recordingsBucket: "mobs" sourcemapsBucket: "sourcemaps" diff --git a/sourcemap-reader/Dockerfile b/sourcemap-reader/Dockerfile index 493317ba2..9f2b257db 100644 --- a/sourcemap-reader/Dockerfile +++ b/sourcemap-reader/Dockerfile @@ -1,5 +1,7 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini ARG envarg @@ -8,6 +10,7 @@ ENV SOURCE_MAP_VERSION=0.7.4 \ LISTEN_PORT=9000 \ MAPPING_WASM=/work/mappings.wasm \ PRIVATE_ENDPOINTS=true \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm ${MAPPING_WASM} diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 9767512f3..859347fd4 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -10,7 +10,8 @@ set -e image_name="sourcemaps-reader" -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha}
envarg="default-foss" tmp_folder_name="${image_name}_${RANDOM}" @@ -37,14 +38,17 @@ function build_api(){ envarg="default-ee" tag="ee-" } - docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} . + docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} . cd ../sourcemap-reader rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest + docker push ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest docker push ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image_name:${image_tag} + } echo "${image_name} docker build completed" } diff --git a/utilities/Dockerfile b/utilities/Dockerfile index 08ccba56f..bd47e1c71 100644 --- a/utilities/Dockerfile +++ b/utilities/Dockerfile @@ -1,11 +1,14 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini git libc6-compat && ln -s /lib/libc.musl-x86_64.so.1 /lib/ld-linux-x86-64.so.2 ARG envarg ENV ENTERPRISE_BUILD=${envarg} \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ PRIVATE_ENDPOINTS=false \ + GIT_SHA=$GIT_SHA \ LISTEN_PORT=9001 WORKDIR /work COPY package.json . 
diff --git a/utilities/build.sh b/utilities/build.sh index 98f244749..87ff7f3e6 100644 --- a/utilities/build.sh +++ b/utilities/build.sh @@ -6,7 +6,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-$git_sha} check_prereq() { which docker || { echo "Docker not installed, please install docker." @@ -26,15 +27,18 @@ function build_api(){ [[ $1 == "ee" ]] && { cp -rf ../ee/utilities/* ./ } - docker build -f ./Dockerfile -t ${DOCKER_REPO:-'local'}/assist:${git_sha1} . + docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist:${image_tag} . cd ../utilities rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/assist:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/assist:${git_sha1} ${DOCKER_REPO:-'local'}/assist:latest + docker push ${DOCKER_REPO:-'local'}/assist:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/assist:${image_tag} ${DOCKER_REPO:-'local'}/assist:latest docker push ${DOCKER_REPO:-'local'}/assist:latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist:${image_tag} + } echo "build completed for assist" }