From d151c20ed8ec5a56f67d7ce7acf4ff81792cecfd Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Tue, 27 Dec 2022 17:03:48 +0100 Subject: [PATCH 01/19] feat(backend): new metrics for storage (uploading_duration) --- backend/internal/storage/storage.go | 29 +++++++++++++++++++++++++++-- backend/pkg/monitoring/metrics.go | 2 +- 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/backend/internal/storage/storage.go b/backend/internal/storage/storage.go index 75a91d594..3b315561d 100644 --- a/backend/internal/storage/storage.go +++ b/backend/internal/storage/storage.go @@ -44,6 +44,8 @@ type Storage struct { readingDEVTime syncfloat64.Histogram archivingDOMTime syncfloat64.Histogram archivingDEVTime syncfloat64.Histogram + uploadingDOMTime syncfloat64.Histogram + uploadingDEVTime syncfloat64.Histogram tasks chan *Task ready chan struct{} @@ -85,6 +87,14 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor if err != nil { log.Printf("can't create archiving_duration metric: %s", err) } + uploadingDOMTime, err := metrics.RegisterHistogram("uploading_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } + uploadingDEVTime, err := metrics.RegisterHistogram("uploading_dt_duration") + if err != nil { + log.Printf("can't create uploading_duration metric: %s", err) + } newStorage := &Storage{ cfg: cfg, s3: s3, @@ -96,6 +106,8 @@ func New(cfg *config.Config, s3 *storage.S3, metrics *monitoring.Metrics) (*Stor readingDEVTime: readingDEVTime, archivingDOMTime: archivingDOMTime, archivingDEVTime: archivingDEVTime, + uploadingDOMTime: uploadingDOMTime, + uploadingDEVTime: uploadingDEVTime, tasks: make(chan *Task, 1), ready: make(chan struct{}), } @@ -119,13 +131,13 @@ func (s *Storage) Upload(msg *messages.SessionEnd) (err error) { wg.Add(2) go func() { if prepErr := s.prepareSession(filePath, DOM, newTask); prepErr != nil { - err = fmt.Errorf("prepareSession err: %s", prepErr) + err = 
fmt.Errorf("prepareSession DOM err: %s", prepErr) } wg.Done() }() go func() { if prepErr := s.prepareSession(filePath, DEV, newTask); prepErr != nil { - err = fmt.Errorf("prepareSession err: %s", prepErr) + err = fmt.Errorf("prepareSession DEV err: %s", prepErr) } wg.Done() }() @@ -237,33 +249,46 @@ func (s *Storage) compressSession(data []byte) *bytes.Buffer { func (s *Storage) uploadSession(task *Task) { wg := &sync.WaitGroup{} wg.Add(3) + var ( + uploadDoms int64 = 0 + uploadDome int64 = 0 + uploadDev int64 = 0 + ) go func() { if task.doms != nil { + start := time.Now() if err := s.s3.Upload(task.doms, task.id+string(DOM)+"s", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. %s", err) } + uploadDoms = time.Now().Sub(start).Milliseconds() } wg.Done() }() go func() { if task.dome != nil { + start := time.Now() if err := s.s3.Upload(task.dome, task.id+string(DOM)+"e", "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. %s", err) } + uploadDome = time.Now().Sub(start).Milliseconds() } wg.Done() }() go func() { if task.dev != nil { + start := time.Now() if err := s.s3.Upload(task.dev, task.id+string(DEV), "application/octet-stream", true); err != nil { log.Fatalf("Storage: start upload failed. 
%s", err) } + uploadDev = time.Now().Sub(start).Milliseconds() } wg.Done() }() wg.Wait() // Record metrics ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + s.uploadingDOMTime.Record(ctx, float64(uploadDoms+uploadDome)) + s.uploadingDEVTime.Record(ctx, float64(uploadDev)) s.totalSessions.Add(ctx, 1) } diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go index d3cd807c6..e407b6272 100644 --- a/backend/pkg/monitoring/metrics.go +++ b/backend/pkg/monitoring/metrics.go @@ -38,7 +38,7 @@ func New(name string) *Metrics { // initPrometheusDataExporter allows to use collected metrics in prometheus func (m *Metrics) initPrometheusDataExporter() { config := prometheus.Config{ - DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50}, + DefaultHistogramBoundaries: []float64{1, 2, 5, 10, 20, 50, 100, 250, 500, 1000}, } c := controller.New( processor.NewFactory( From ee64d91a7edf726460f71029dc93c69c29881b1f Mon Sep 17 00:00:00 2001 From: Alexander Zavorotynskiy Date: Tue, 27 Dec 2022 18:58:46 +0100 Subject: [PATCH 02/19] feat(backend): added metrics for postgres bulks (size and insert duration) --- backend/pkg/db/postgres/bulk.go | 56 ++++++++++++++++++++-------- backend/pkg/db/postgres/connector.go | 32 ++++++++-------- backend/pkg/monitoring/metrics.go | 12 +++--- 3 files changed, 63 insertions(+), 37 deletions(-) diff --git a/backend/pkg/db/postgres/bulk.go b/backend/pkg/db/postgres/bulk.go index 16f59efcd..7b9bf90c8 100644 --- a/backend/pkg/db/postgres/bulk.go +++ b/backend/pkg/db/postgres/bulk.go @@ -2,8 +2,14 @@ package postgres import ( "bytes" + "context" "errors" "fmt" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/metric/instrument/syncfloat64" + "log" + "openreplay/backend/pkg/monitoring" + "time" ) const ( @@ -18,13 +24,15 @@ type Bulk interface { } type bulkImpl struct { - conn Pool - table string - columns string - template string - setSize int - sizeLimit int - values []interface{} + 
conn Pool + table string + columns string + template string + setSize int + sizeLimit int + values []interface{} + bulkSize syncfloat64.Histogram + bulkDuration syncfloat64.Histogram } func (b *bulkImpl) Append(args ...interface{}) error { @@ -46,6 +54,8 @@ func (b *bulkImpl) Send() error { } func (b *bulkImpl) send() error { + start := time.Now() + size := len(b.values) / b.setSize request := bytes.NewBufferString(insertPrefix + b.table + b.columns + insertValues) args := make([]interface{}, b.setSize) for i := 0; i < len(b.values)/b.setSize; i++ { @@ -63,13 +73,19 @@ func (b *bulkImpl) send() error { if err != nil { return fmt.Errorf("send bulk err: %s", err) } + // Save bulk metrics + ctx, _ := context.WithTimeout(context.Background(), time.Millisecond*200) + b.bulkDuration.Record(ctx, float64(time.Now().Sub(start).Milliseconds()), attribute.String("table", b.table)) + b.bulkSize.Record(ctx, float64(size), attribute.String("table", b.table)) return nil } -func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { +func NewBulk(conn Pool, metrics *monitoring.Metrics, table, columns, template string, setSize, sizeLimit int) (Bulk, error) { switch { case conn == nil: return nil, errors.New("db conn is empty") + case metrics == nil: + return nil, errors.New("metrics is empty") case table == "": return nil, errors.New("table is empty") case columns == "": @@ -81,13 +97,23 @@ func NewBulk(conn Pool, table, columns, template string, setSize, sizeLimit int) case sizeLimit <= 0: return nil, errors.New("size limit is wrong") } + messagesInBulk, err := metrics.RegisterHistogram("messages_in_bulk") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } + bulkInsertDuration, err := metrics.RegisterHistogram("bulk_insert_duration") + if err != nil { + log.Printf("can't create messages_size metric: %s", err) + } return &bulkImpl{ - conn: conn, - table: table, - columns: columns, - template: template, - setSize: 
setSize, - sizeLimit: sizeLimit, - values: make([]interface{}, 0, setSize*sizeLimit), + conn: conn, + table: table, + columns: columns, + template: template, + setSize: setSize, + sizeLimit: sizeLimit, + values: make([]interface{}, 0, setSize*sizeLimit), + bulkSize: messagesInBulk, + bulkDuration: bulkInsertDuration, }, nil } diff --git a/backend/pkg/db/postgres/connector.go b/backend/pkg/db/postgres/connector.go index 51773c1e2..73859fd39 100644 --- a/backend/pkg/db/postgres/connector.go +++ b/backend/pkg/db/postgres/connector.go @@ -78,7 +78,7 @@ func NewConn(url string, queueLimit, sizeLimit int, metrics *monitoring.Metrics) if err != nil { log.Fatalf("can't create new pool wrapper: %s", err) } - conn.initBulks() + conn.initBulks(metrics) return conn } @@ -107,9 +107,9 @@ func (conn *Conn) initMetrics(metrics *monitoring.Metrics) { } } -func (conn *Conn) initBulks() { +func (conn *Conn) initBulks(metrics *monitoring.Metrics) { var err error - conn.autocompletes, err = NewBulk(conn.c, + conn.autocompletes, err = NewBulk(conn.c, metrics, "autocomplete", "(value, type, project_id)", "($%d, $%d, $%d)", @@ -117,7 +117,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create autocomplete bulk: %s", err) } - conn.requests, err = NewBulk(conn.c, + conn.requests, err = NewBulk(conn.c, metrics, "events_common.requests", "(session_id, timestamp, seq_index, url, duration, success)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -125,7 +125,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create requests bulk: %s", err) } - conn.customEvents, err = NewBulk(conn.c, + conn.customEvents, err = NewBulk(conn.c, metrics, "events_common.customs", "(session_id, timestamp, seq_index, name, payload)", "($%d, $%d, $%d, left($%d, 2700), $%d)", @@ -133,7 +133,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create customEvents bulk: %s", err) } - conn.webPageEvents, err = NewBulk(conn.c, + conn.webPageEvents, err = 
NewBulk(conn.c, metrics, "events.pages", "(session_id, message_id, timestamp, referrer, base_referrer, host, path, query, dom_content_loaded_time, "+ "load_time, response_end, first_paint_time, first_contentful_paint_time, speed_index, visually_complete, "+ @@ -144,7 +144,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webInputEvents, err = NewBulk(conn.c, + conn.webInputEvents, err = NewBulk(conn.c, metrics, "events.inputs", "(session_id, message_id, timestamp, value, label)", "($%d, $%d, $%d, $%d, NULLIF($%d,''))", @@ -152,7 +152,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webGraphQL, err = NewBulk(conn.c, + conn.webGraphQL, err = NewBulk(conn.c, metrics, "events.graphql", "(session_id, timestamp, message_id, name, request_body, response_body)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -160,7 +160,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webPageEvents bulk: %s", err) } - conn.webErrors, err = NewBulk(conn.c, + conn.webErrors, err = NewBulk(conn.c, metrics, "errors", "(error_id, project_id, source, name, message, payload)", "($%d, $%d, $%d, $%d, $%d, $%d::jsonb)", @@ -168,7 +168,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webErrors bulk: %s", err) } - conn.webErrorEvents, err = NewBulk(conn.c, + conn.webErrorEvents, err = NewBulk(conn.c, metrics, "events.errors", "(session_id, message_id, timestamp, error_id)", "($%d, $%d, $%d, $%d)", @@ -176,7 +176,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webErrorEvents bulk: %s", err) } - conn.webErrorTags, err = NewBulk(conn.c, + conn.webErrorTags, err = NewBulk(conn.c, metrics, "public.errors_tags", "(session_id, message_id, error_id, key, value)", "($%d, $%d, $%d, $%d, $%d)", @@ -184,7 +184,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create 
webErrorEvents bulk: %s", err) } - conn.webIssues, err = NewBulk(conn.c, + conn.webIssues, err = NewBulk(conn.c, metrics, "issues", "(project_id, issue_id, type, context_string)", "($%d, $%d, $%d, $%d)", @@ -192,7 +192,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webIssues bulk: %s", err) } - conn.webIssueEvents, err = NewBulk(conn.c, + conn.webIssueEvents, err = NewBulk(conn.c, metrics, "events_common.issues", "(session_id, issue_id, timestamp, seq_index, payload)", "($%d, $%d, $%d, $%d, CAST($%d AS jsonb))", @@ -200,7 +200,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webIssueEvents bulk: %s", err) } - conn.webCustomEvents, err = NewBulk(conn.c, + conn.webCustomEvents, err = NewBulk(conn.c, metrics, "events_common.customs", "(session_id, seq_index, timestamp, name, payload, level)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d)", @@ -208,7 +208,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webCustomEvents bulk: %s", err) } - conn.webClickEvents, err = NewBulk(conn.c, + conn.webClickEvents, err = NewBulk(conn.c, metrics, "events.clicks", "(session_id, message_id, timestamp, label, selector, url, path)", "($%d, $%d, $%d, NULLIF($%d, ''), $%d, $%d, $%d)", @@ -216,7 +216,7 @@ func (conn *Conn) initBulks() { if err != nil { log.Fatalf("can't create webClickEvents bulk: %s", err) } - conn.webNetworkRequest, err = NewBulk(conn.c, + conn.webNetworkRequest, err = NewBulk(conn.c, metrics, "events_common.requests", "(session_id, timestamp, seq_index, url, host, path, query, request_body, response_body, status_code, method, duration, success)", "($%d, $%d, $%d, left($%d, 2700), $%d, $%d, $%d, $%d, $%d, $%d::smallint, NULLIF($%d, '')::http_method, $%d, $%d)", diff --git a/backend/pkg/monitoring/metrics.go b/backend/pkg/monitoring/metrics.go index e407b6272..803fba127 100644 --- a/backend/pkg/monitoring/metrics.go +++ b/backend/pkg/monitoring/metrics.go @@ -76,8 +76,8 @@ Counter is a 
synchronous instrument that measures additive non-decreasing values */ func (m *Metrics) RegisterCounter(name string) (syncfloat64.Counter, error) { - if _, ok := m.counters[name]; ok { - return nil, fmt.Errorf("counter %s already exists", name) + if counter, ok := m.counters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().Counter(name) if err != nil { @@ -100,8 +100,8 @@ for example, the number of: */ func (m *Metrics) RegisterUpDownCounter(name string) (syncfloat64.UpDownCounter, error) { - if _, ok := m.upDownCounters[name]; ok { - return nil, fmt.Errorf("upDownCounter %s already exists", name) + if counter, ok := m.upDownCounters[name]; ok { + return counter, nil } counter, err := m.meter.SyncFloat64().UpDownCounter(name) if err != nil { @@ -122,8 +122,8 @@ Histogram is a synchronous instrument that produces a histogram from recorded va */ func (m *Metrics) RegisterHistogram(name string) (syncfloat64.Histogram, error) { - if _, ok := m.histograms[name]; ok { - return nil, fmt.Errorf("histogram %s already exists", name) + if hist, ok := m.histograms[name]; ok { + return hist, nil } hist, err := m.meter.SyncFloat64().Histogram(name) if err != nil { From cd5ce825e2f79f32d59ac56f306b99e97571b67a Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 28 Dec 2022 09:58:35 +0100 Subject: [PATCH 03/19] ci(actions): Remove duplicate to build Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 4 +--- .github/workflows/workers.yaml | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index 3f32314f9..cd0288277 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE 
^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt diff --git a/.github/workflows/workers.yaml b/.github/workflows/workers.yaml index 21fc55c06..0d9927df9 100644 --- a/.github/workflows/workers.yaml +++ b/.github/workflows/workers.yaml @@ -71,12 +71,10 @@ jobs: case ${build_param} in false) { - git diff --name-only HEAD HEAD~1 | grep -E "backend/cmd|backend/services" | grep -vE ^ee/ | cut -d '/' -f3 - git diff --name-only HEAD HEAD~1 | grep -E "backend/pkg|backend/internal" | grep -vE ^ee/ | cut -d '/' -f3 | uniq | while read -r pkg_name ; do grep -rl "pkg/$pkg_name" backend/services backend/cmd | cut -d '/' -f3 done - } | uniq > /tmp/images_to_build.txt + } | awk '!seen[$0]++' > /tmp/images_to_build.txt ;; all) ls backend/cmd > /tmp/images_to_build.txt From dfcd9673d200d2c3698b78d5bed3c832787bd3e8 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 28 Dec 2022 12:15:12 +0100 Subject: [PATCH 04/19] ci(actions): Enable alerts for failed builds Signed-off-by: rjshrjndrn --- .github/workflows/workers-ee.yaml | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/workflows/workers-ee.yaml b/.github/workflows/workers-ee.yaml index cd0288277..b75d22093 100644 --- a/.github/workflows/workers-ee.yaml +++ b/.github/workflows/workers-ee.yaml @@ -158,17 +158,16 @@ jobs: # Deploy command helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true | kubectl apply -f - - #- name: Alert slack - # if: ${{ failure() }} - # uses: rtCamp/action-slack-notify@v2 - # env: - # SLACK_CHANNEL: ee - # SLACK_TITLE: "Failed ${{ github.workflow }}" - # SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' - # SLACK_WEBHOOK: 
${{ secrets.SLACK_WEB_HOOK }} - # SLACK_USERNAME: "OR Bot" - # SLACK_MESSAGE: 'Build failed :bomb:' - + - name: Alert slack + if: ${{ failure() }} + uses: rtCamp/action-slack-notify@v2 + env: + SLACK_CHANNEL: ee + SLACK_TITLE: "Failed ${{ github.workflow }}" + SLACK_COLOR: ${{ job.status }} # or a specific color like 'good' or '#ff00ff' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEB_HOOK }} + SLACK_USERNAME: "OR Bot" + SLACK_MESSAGE: 'Build failed :bomb:' # - name: Debug Job # if: ${{ failure() }} From 4042ad940647ae3a818170e114b878e7cb877eef Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 28 Dec 2022 12:44:48 +0100 Subject: [PATCH 05/19] build(docker): Adding gitsha as env Signed-off-by: rjshrjndrn --- backend/Dockerfile | 4 ++++ backend/build.sh | 9 +++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index ae6a5db18..043de51cd 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -19,10 +19,14 @@ RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 go build -o service -tags dynamic open FROM alpine AS entrypoint +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache ca-certificates librdkafka-dev cyrus-sasl cyrus-sasl-gssapiv2 krb5 RUN adduser -u 1001 openreplay -D ENV TZ=UTC \ + GIT_SHA=$GIT_SHA \ FS_ULIMIT=1000 \ FS_DIR=/mnt/efs \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ diff --git a/backend/build.sh b/backend/build.sh index 073f540df..6ff4cd4ef 100755 --- a/backend/build.sh +++ b/backend/build.sh @@ -9,7 +9,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh set -e -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} ee="false" check_prereq() { which docker || { @@ -22,9 +23,9 @@ check_prereq() { function build_service() { image="$1" echo "BUILDING $image" - docker build -t ${DOCKER_REPO:-'local'}/$image:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image . 
+ docker build -t ${DOCKER_REPO:-'local'}/$image:${image_tag} --platform linux/amd64 --build-arg SERVICE_NAME=$image --build-arg GIT_SHA=$git_sha . [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/$image:${git_sha1} + docker push ${DOCKER_REPO:-'local'}/$image:${image_tag} } echo "Build completed for $image" return @@ -51,7 +52,7 @@ function build_api(){ for image in $(ls cmd); do build_service $image - echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${git_sha1}" + echo "::set-output name=image::${DOCKER_REPO:-'local'}/$image:${image_tag}" done cd ../backend rm -rf ../${destination} From e96f1fce0f60c94b32e629260c7b086f2b42a750 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 28 Dec 2022 14:19:38 +0100 Subject: [PATCH 06/19] build(docker): Adding git sha --- api/Dockerfile | 6 +++++- api/Dockerfile_alerts | 4 ++++ api/build.sh | 9 +++++---- api/build_alerts.sh | 9 +++++---- frontend/Dockerfile | 4 ++++ frontend/build.sh | 7 ++++--- peers/Dockerfile | 3 +++ peers/build.sh | 9 +++++---- scripts/helmcharts/vars.yaml | 2 +- sourcemap-reader/Dockerfile | 3 +++ sourcemap-reader/build.sh | 9 +++++---- utilities/Dockerfile | 3 +++ utilities/build.sh | 9 +++++---- 13 files changed, 52 insertions(+), 25 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index 036dcb0f2..dec3156cb 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg # Add Tini @@ -9,7 +12,8 @@ ENV SOURCE_MAP_VERSION=0.7.4 \ APP_NAME=chalice \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=false \ - ENTERPRISE_BUILD=${envarg} + ENTERPRISE_BUILD=${envarg} \ + GIT_SHA=$GIT_SHA WORKDIR /work COPY requirements.txt ./requirements.txt diff --git a/api/Dockerfile_alerts b/api/Dockerfile_alerts index 881b21fb9..117e28456 100644 --- a/api/Dockerfile_alerts +++ b/api/Dockerfile_alerts @@ 
-1,6 +1,9 @@ FROM python:3.10-alpine LABEL Maintainer="Rajesh Rajendran" LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA + RUN apk add --no-cache build-base tini ARG envarg ENV APP_NAME=alerts \ @@ -8,6 +11,7 @@ ENV APP_NAME=alerts \ PG_MAXCONN=10 \ LISTEN_PORT=8000 \ PRIVATE_ENDPOINTS=true \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} WORKDIR /work diff --git a/api/build.sh b/api/build.sh index 895f9bb8e..c5610e233 100644 --- a/api/build.sh +++ b/api/build.sh @@ -16,7 +16,8 @@ exit_err() { } environment=$1 -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -41,12 +42,12 @@ function build_api(){ tag="ee-" } mv Dockerfile.dockerignore .dockerignore - docker build -f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/chalice:${git_sha1} . + docker build -f ./Dockerfile --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/chalice:${image_tag} . 
cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/chalice:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/chalice:${git_sha1} ${DOCKER_REPO:-'local'}/chalice:${tag}latest + docker push ${DOCKER_REPO:-'local'}/chalice:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/chalice:${image_tag} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest } echo "api docker build completed" diff --git a/api/build_alerts.sh b/api/build_alerts.sh index a36472a8d..81c4cdb36 100644 --- a/api/build_alerts.sh +++ b/api/build_alerts.sh @@ -7,7 +7,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} envarg="default-foss" check_prereq() { which docker || { @@ -31,12 +32,12 @@ function build_alerts(){ tag="ee-" } mv Dockerfile_alerts.dockerignore .dockerignore - docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/alerts:${git_sha1} . + docker build -f ./Dockerfile_alerts --build-arg envarg=$envarg --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/alerts:${image_tag} . 
cd ../api rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/alerts:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/alerts:${git_sha1} ${DOCKER_REPO:-'local'}/alerts:${tag}latest + docker push ${DOCKER_REPO:-'local'}/alerts:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/alerts:${image_tag} ${DOCKER_REPO:-'local'}/alerts:${tag}latest docker push ${DOCKER_REPO:-'local'}/alerts:${tag}latest } echo "completed alerts build" diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 5e6c9b3b0..ccd4655ee 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -14,9 +14,13 @@ COPY nginx.conf /etc/nginx/conf.d/default.conf # Default step in docker build FROM nginx:alpine LABEL maintainer=Rajesh +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA COPY --from=builder /work/public /var/www/openreplay COPY nginx.conf /etc/nginx/conf.d/default.conf +ENV GIT_SHA=$GIT_SHA + EXPOSE 8080 RUN chown -R nginx:nginx /var/cache/nginx && \ chown -R nginx:nginx /var/log/nginx && \ diff --git a/frontend/build.sh b/frontend/build.sh index f57d98af2..33b726c1a 100644 --- a/frontend/build.sh +++ b/frontend/build.sh @@ -8,7 +8,8 @@ # Example # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} ee="false" check_prereq() { which docker || { @@ -21,9 +22,9 @@ check_prereq() { export DOCKER_BUILDKIT=1 function build(){ # Run docker as the same user, else we'll run in to permission issues. - docker build -t ${DOCKER_REPO:-'local'}/frontend:${git_sha1} --platform linux/amd64 --build-arg SERVICE_NAME=$image . + docker build -t ${DOCKER_REPO:-'local'}/frontend:${image_tag} --platform linux/amd64 --build-arg GIT_SHA=$git_sha . 
[[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/frontend:${git_sha1} + docker push ${DOCKER_REPO:-'local'}/frontend:${image_tag} } echo "frotend build completed" } diff --git a/peers/Dockerfile b/peers/Dockerfile index bfabf6bea..2c8254482 100644 --- a/peers/Dockerfile +++ b/peers/Dockerfile @@ -1,8 +1,11 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini ARG envarg ENV PRIVATE_ENDPOINTS=false \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} WORKDIR /work diff --git a/peers/build.sh b/peers/build.sh index e84b942ec..25aa0d7ed 100644 --- a/peers/build.sh +++ b/peers/build.sh @@ -6,7 +6,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} check_prereq() { which docker || { echo "Docker not installed, please install docker." @@ -26,12 +27,12 @@ function build_api(){ [[ $1 == "ee" ]] && { cp -rf ../ee/peers/* ./ } - docker build -f ./Dockerfile -t ${DOCKER_REPO:-'local'}/peers:${git_sha1} . + docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/peers:${image_tag} . 
cd ../peers rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/peers:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/peers:${git_sha1} ${DOCKER_REPO:-'local'}/peers:latest + docker push ${DOCKER_REPO:-'local'}/peers:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/peers:${image_tag} ${DOCKER_REPO:-'local'}/peers:latest docker push ${DOCKER_REPO:-'local'}/peers:latest } echo "peer docker build complted" diff --git a/scripts/helmcharts/vars.yaml b/scripts/helmcharts/vars.yaml index c77881963..05b3c7231 100644 --- a/scripts/helmcharts/vars.yaml +++ b/scripts/helmcharts/vars.yaml @@ -110,7 +110,7 @@ global: assistJWTSecret: "SetARandomStringHere" s3: region: "us-east-1" - endpoint: "http://minio.db.svc.cluster.local:9000" + endpoint: "http://minio.openreplay.svc.cluster.local:9000" assetsBucket: "sessions-assets" recordingsBucket: "mobs" sourcemapsBucket: "sourcemaps" diff --git a/sourcemap-reader/Dockerfile b/sourcemap-reader/Dockerfile index 493317ba2..9f2b257db 100644 --- a/sourcemap-reader/Dockerfile +++ b/sourcemap-reader/Dockerfile @@ -1,5 +1,7 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini ARG envarg @@ -8,6 +10,7 @@ ENV SOURCE_MAP_VERSION=0.7.4 \ LISTEN_PORT=9000 \ MAPPING_WASM=/work/mappings.wasm \ PRIVATE_ENDPOINTS=true \ + GIT_SHA=$GIT_SHA \ ENTERPRISE_BUILD=${envarg} ADD https://unpkg.com/source-map@${SOURCE_MAP_VERSION}/lib/mappings.wasm ${MAPPING_WASM} diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 9767512f3..33d318cab 100644 --- a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -10,7 +10,8 @@ set -e image_name="sourcemaps-reader" -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} envarg="default-foss" tmp_folder_name="${image_name}_${RANDOM}" @@ -37,12 +38,12 @@ function build_api(){ envarg="default-ee" tag="ee-" } - docker build 
-f ./Dockerfile --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} . + docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha --build-arg envarg=$envarg -t ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} . cd ../sourcemap-reader rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/${image_name}:${git_sha1} ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest + docker push ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest docker push ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest } echo "${image_name} docker build completed" diff --git a/utilities/Dockerfile b/utilities/Dockerfile index 08ccba56f..bd47e1c71 100644 --- a/utilities/Dockerfile +++ b/utilities/Dockerfile @@ -1,11 +1,14 @@ FROM node:18-alpine LABEL Maintainer="KRAIEM Taha Yassine" +ARG GIT_SHA +LABEL GIT_SHA=$GIT_SHA RUN apk add --no-cache tini git libc6-compat && ln -s /lib/libc.musl-x86_64.so.1 /lib/ld-linux-x86-64.so.2 ARG envarg ENV ENTERPRISE_BUILD=${envarg} \ MAXMINDDB_FILE=/home/openreplay/geoip.mmdb \ PRIVATE_ENDPOINTS=false \ + GIT_SHA=$GIT_SHA \ LISTEN_PORT=9001 WORKDIR /work COPY package.json . diff --git a/utilities/build.sh b/utilities/build.sh index 98f244749..d1042375b 100644 --- a/utilities/build.sh +++ b/utilities/build.sh @@ -6,7 +6,8 @@ # Usage: IMAGE_TAG=latest DOCKER_REPO=myDockerHubID bash build.sh -git_sha1=${IMAGE_TAG:-$(git rev-parse HEAD)} +git_sha=$(git rev-parse --short HEAD) +image_tag=${IMAGE_TAG:-git_sha} check_prereq() { which docker || { echo "Docker not installed, please install docker." @@ -26,13 +27,13 @@ function build_api(){ [[ $1 == "ee" ]] && { cp -rf ../ee/utilities/* ./ } - docker build -f ./Dockerfile -t ${DOCKER_REPO:-'local'}/assist:${git_sha1} . 
+ docker build -f ./Dockerfile --build-arg GIT_SHA=$git_sha -t ${DOCKER_REPO:-'local'}/assist:${image_tag} . cd ../utilities rm -rf ../${destination} [[ $PUSH_IMAGE -eq 1 ]] && { - docker push ${DOCKER_REPO:-'local'}/assist:${git_sha1} - docker tag ${DOCKER_REPO:-'local'}/assist:${git_sha1} ${DOCKER_REPO:-'local'}/assist:latest + docker push ${DOCKER_REPO:-'local'}/assist:${image_tag} + docker tag ${DOCKER_REPO:-'local'}/assist:${image_tag} ${DOCKER_REPO:-'local'}/assist:latest docker push ${DOCKER_REPO:-'local'}/assist:latest } echo "build completed for assist" From 2ebf04099353ef70395b8c8f5e130f70e1ed0814 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 28 Dec 2022 15:39:05 +0100 Subject: [PATCH 07/19] ci(actions): Deploy utilities from correct folder Signed-off-by: rjshrjndrn --- .github/workflows/utilities.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/utilities.yaml b/.github/workflows/utilities.yaml index 92e130c84..afbc85043 100644 --- a/.github/workflows/utilities.yaml +++ b/.github/workflows/utilities.yaml @@ -43,7 +43,7 @@ jobs: PUSH_IMAGE=1 bash build.sh - name: Deploy to kubernetes run: | - cd scripts/helm/ + cd scripts/helmcharts/ sed -i "s#openReplayContainerRegistry.*#openReplayContainerRegistry: \"${{ secrets.OSS_REGISTRY_URL }}\"#g" vars.yaml sed -i "s#minio_access_key.*#minio_access_key: \"${{ secrets.OSS_MINIO_ACCESS_KEY }}\" #g" vars.yaml sed -i "s#minio_secret_key.*#minio_secret_key: \"${{ secrets.OSS_MINIO_SECRET_KEY }}\" #g" vars.yaml From 4022ca935329530553dd5690d523c3e14efb6138 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Fri, 30 Dec 2022 12:31:25 +0100 Subject: [PATCH 08/19] fix(ci): push chalice images Signed-off-by: rjshrjndrn --- .github/workflows/api-ee.yaml | 5 ++++- .github/workflows/api.yaml | 7 +++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index a6a3998cb..34997b348 100644 --- 
a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -67,7 +67,10 @@ jobs: } && { echo "Skipping Security Checks" } - docker push $DOCKER_REPO/$image:$IMAGE_TAG + images=("chalice" "alerts") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done - name: Creating old image input run: | # diff --git a/.github/workflows/api.yaml b/.github/workflows/api.yaml index e85775ed2..ec83c1c3c 100644 --- a/.github/workflows/api.yaml +++ b/.github/workflows/api.yaml @@ -66,7 +66,10 @@ jobs: } && { echo "Skipping Security Checks" } - docker push $DOCKER_REPO/$image:$IMAGE_TAG + images=("chalice" "alerts") + for image in ${images[*]};do + docker push $DOCKER_REPO/$image:$IMAGE_TAG + done - name: Creating old image input run: | # @@ -131,4 +134,4 @@ jobs: # DOCKER_REPO: ${{ secrets.OSS_REGISTRY_URL }} # IMAGE_TAG: ${{ github.sha }} # ENVIRONMENT: staging - # + From 7d1628f4fb396576bc6581ea304353583006e0ad Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Fri, 30 Dec 2022 15:21:27 +0100 Subject: [PATCH 10/19] fix(ci): chalice ee deployment Signed-off-by: rjshrjndrn --- .github/workflows/api-ee.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index 34997b348..f30c1b111 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -110,7 +110,7 @@ jobs: cat /tmp/image_override.yaml # Deploy command - helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} # We're not passing -ee flag, because helm will add that. From 038b4ec4f08b5e23aa2b34894ef0855676d4ae3c Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 2 Jan 2023 14:12:11 +0100 Subject: [PATCH 11/19] fix(ui): fix jwt and user state syncing --- frontend/app/Router.js | 2 +- frontend/app/api_client.js | 2 +- frontend/app/api_middleware.js | 8 ++--- frontend/app/components/Login/Login.js | 2 +- frontend/app/components/Session/WebPlayer.tsx | 2 +- frontend/app/duck/index.ts | 2 -- frontend/app/duck/jwt.js | 19 ------------ frontend/app/duck/user.js | 30 ++++++++++++------- frontend/app/store.js | 18 +++++++---- 9 files changed, 39 insertions(+), 46 deletions(-) delete mode 100644 frontend/app/duck/jwt.js diff --git a/frontend/app/Router.js b/frontend/app/Router.js index e99ee2546..8ee29802a 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -90,7 +90,7 @@ const MULTIVIEW_INDEX_PATH = routes.multiviewIndex(); @connect( (state) => { const siteId = state.getIn(['site', 'siteId']); - const jwt = state.get('jwt'); + const jwt = 
+CVE-2022-40897 exp:2023-02-01 From 1fbd730a3ef05605a2bbc70876932e8bb94dc7e9 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Fri, 30 Dec 2022 15:31:36 +0100 Subject: [PATCH 10/19] fix(ci): chalice ee deployment Signed-off-by: rjshrjndrn --- .github/workflows/api-ee.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml index 34997b348..f30c1b111 100644 --- a/.github/workflows/api-ee.yaml +++ b/.github/workflows/api-ee.yaml @@ -110,7 +110,7 @@ jobs: cat /tmp/image_override.yaml # Deploy command - helm upgrade --install openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set skipMigration=true --no-hooks + helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -n app -f - env: DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }} # We're not passing -ee flag, because helm will add that. From 038b4ec4f08b5e23aa2b34894ef0855676d4ae3c Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 2 Jan 2023 14:12:11 +0100 Subject: [PATCH 11/19] fix(ui): fix jwt and user state syncing --- frontend/app/Router.js | 2 +- frontend/app/api_client.js | 2 +- frontend/app/api_middleware.js | 8 ++--- frontend/app/components/Login/Login.js | 2 +- frontend/app/components/Session/WebPlayer.tsx | 2 +- frontend/app/duck/index.ts | 2 -- frontend/app/duck/jwt.js | 19 ------------ frontend/app/duck/user.js | 30 ++++++++++++------- frontend/app/store.js | 18 +++++++---- 9 files changed, 39 insertions(+), 46 deletions(-) delete mode 100644 frontend/app/duck/jwt.js diff --git a/frontend/app/Router.js b/frontend/app/Router.js index e99ee2546..8ee29802a 100644 --- a/frontend/app/Router.js +++ b/frontend/app/Router.js @@ -90,7 +90,7 @@ const MULTIVIEW_INDEX_PATH = routes.multiviewIndex(); @connect( (state) => { const siteId = state.getIn(['site', 'siteId']); - const jwt = state.get('jwt'); + const jwt = 
state.getIn(['user', 'jwt']); const changePassword = state.getIn(['user', 'account', 'changePassword']); const userInfoLoading = state.getIn(['user', 'fetchUserInfoRequest', 'loading']); return { diff --git a/frontend/app/api_client.js b/frontend/app/api_client.js index b60c0dbb7..c0ccf9bcd 100644 --- a/frontend/app/api_client.js +++ b/frontend/app/api_client.js @@ -58,7 +58,7 @@ export const clean = (obj, forbidenValues = [ undefined, '' ]) => { export default class APIClient { constructor() { - const jwt = store.getState().get('jwt'); + const jwt = store.getState().getIn(['user', 'jwt']); const siteId = store.getState().getIn([ 'site', 'siteId' ]); this.init = { headers: { diff --git a/frontend/app/api_middleware.js b/frontend/app/api_middleware.js index 1846a9dbc..8ccc6bc37 100644 --- a/frontend/app/api_middleware.js +++ b/frontend/app/api_middleware.js @@ -1,8 +1,8 @@ import logger from 'App/logger'; import APIClient from './api_client'; -import { UPDATE, DELETE } from './duck/jwt'; +import { UPDATE_JWT } from './duck/user'; -export default (store) => (next) => (action) => { +export default () => (next) => (action) => { const { types, call, ...rest } = action; if (!call) { return next(action); @@ -14,7 +14,7 @@ export default (store) => (next) => (action) => { return call(client) .then(async (response) => { if (response.status === 403) { - next({ type: DELETE }); + next({ type: UPDATE_JWT, data: null }); } if (!response.ok) { const text = await response.text(); @@ -30,7 +30,7 @@ export default (store) => (next) => (action) => { next({ type: SUCCESS, data, ...rest }); } if (jwt) { - next({ type: UPDATE, data: jwt }); + next({ type: UPDATE_JWT, data: jwt }); } }) .catch((e) => { diff --git a/frontend/app/components/Login/Login.js b/frontend/app/components/Login/Login.js index c04e83f12..70ee0f74e 100644 --- a/frontend/app/components/Login/Login.js +++ b/frontend/app/components/Login/Login.js @@ -8,7 +8,7 @@ import ReCAPTCHA from 'react-google-recaptcha'; import { 
withRouter } from 'react-router-dom'; import stl from './login.module.css'; import cn from 'classnames'; -import { setJwt } from 'Duck/jwt'; +import { setJwt } from 'Duck/user'; const FORGOT_PASSWORD = forgotPassword(); const SIGNUP_ROUTE = signup(); diff --git a/frontend/app/components/Session/WebPlayer.tsx b/frontend/app/components/Session/WebPlayer.tsx index 6e169777e..e6f091b2c 100644 --- a/frontend/app/components/Session/WebPlayer.tsx +++ b/frontend/app/components/Session/WebPlayer.tsx @@ -115,7 +115,7 @@ function WebPlayer(props: any) { export default connect( (state: any) => ({ session: state.getIn(['sessions', 'current']), - jwt: state.get('jwt'), + jwt: state.getIn(['user', 'jwt']), fullscreen: state.getIn(['components', 'player', 'fullscreen']), showEvents: state.get('showEvents'), members: state.getIn(['members', 'list']), diff --git a/frontend/app/duck/index.ts b/frontend/app/duck/index.ts index fd4b0a655..0371396bf 100644 --- a/frontend/app/duck/index.ts +++ b/frontend/app/duck/index.ts @@ -1,6 +1,5 @@ import { combineReducers } from 'redux-immutable'; -import jwt from './jwt'; import user from './user'; import sessions from './sessions'; import assignments from './assignments'; @@ -34,7 +33,6 @@ import search from './search'; import liveSearch from './liveSearch'; const rootReducer = combineReducers({ - jwt, user, sessions, assignments, diff --git a/frontend/app/duck/jwt.js b/frontend/app/duck/jwt.js deleted file mode 100644 index 4d4147f34..000000000 --- a/frontend/app/duck/jwt.js +++ /dev/null @@ -1,19 +0,0 @@ -export const UPDATE = 'jwt/UPDATE'; -export const DELETE = 'jwt/DELETE'; - -export default (state = null, action = {}) => { - switch (action.type) { - case UPDATE: - return action.data; - case DELETE: - return null; - } - return state; -}; - -export function setJwt(data) { - return { - type: UPDATE, - data, - }; -} diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 5b39d34db..8ef2bf5ab 100644 --- 
a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -20,7 +20,7 @@ const PUT_CLIENT = new RequestTypes('user/PUT_CLIENT'); const PUSH_NEW_SITE = 'user/PUSH_NEW_SITE'; const SET_ONBOARDING = 'user/SET_ONBOARDING'; -const initialState = Map({ +export const initialState = Map({ account: Account(), siteId: null, passwordRequestError: false, @@ -28,7 +28,8 @@ const initialState = Map({ tenants: [], authDetails: {}, onboarding: false, - sites: List() + sites: List(), + jwt: null }); const setClient = (state, data) => { @@ -36,8 +37,19 @@ const setClient = (state, data) => { return state.set('client', client) } +export const UPDATE_JWT = 'jwt/UPDATE'; +export function setJwt(data) { + return { + type: UPDATE_JWT, + data, + }; +} + + const reducer = (state = initialState, action = {}) => { switch (action.type) { + case UPDATE_JWT: + return state.set('jwt', action.data); case RESET_PASSWORD.SUCCESS: case UPDATE_PASSWORD.SUCCESS: case LOGIN.SUCCESS: @@ -54,9 +66,11 @@ const reducer = (state = initialState, action = {}) => { // return state.set('tenants', action.data.map(i => ({ text: i.name, value: i.tenantId}))); case UPDATE_PASSWORD.FAILURE: return state.set('passwordErrors', List(action.errors)) + case FETCH_ACCOUNT.FAILURE: + case LOGIN.FAILURE: case DELETE: + console.log('hi') deleteCookie('jwt', '/', '.openreplay.com') - console.log('called') return initialState; case PUT_CLIENT.REQUEST: return state.mergeIn([ 'account' ], action.params); @@ -115,16 +129,10 @@ export function fetchTenants() { } } -export const fetchUserInfo = () => dispatch => Promise.all([ - dispatch({ +export const fetchUserInfo = () => ({ types: FETCH_ACCOUNT.toArray(), call: client => client.get('/account'), - }), - // dispatch({ - // types: FETCH_CLIENT.toArray(), - // call: client => client.get('/client'), - // }), -]); + }); export function logout() { return { diff --git a/frontend/app/store.js b/frontend/app/store.js index dd1434a0c..a2379496c 100644 --- a/frontend/app/store.js +++ 
b/frontend/app/store.js @@ -4,26 +4,32 @@ import { Map } from 'immutable'; import indexReducer from './duck'; import apiMiddleware from './api_middleware'; import LocalStorage from './local_storage'; +import { initialState as initUserState, UPDATE_JWT } from './duck/user' const storage = new LocalStorage({ - jwt: String, + user: Object, }); const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ && window.env.NODE_ENV === "development" ? window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ : compose; const storageState = storage.state(); -const initialState = Map({ - jwt: storageState.jwt, - // TODO: store user -}); +const initialState = Map({ user: initUserState.update('jwt', () => storageState.user?.jwt || null) }); const store = createStore(indexReducer, initialState, composeEnhancers(applyMiddleware(thunk, apiMiddleware))); store.subscribe(() => { const state = store.getState(); + storage.sync({ - jwt: state.get('jwt') + user: state.get('user') }); }); +window.getJWT = () => { + console.log(JSON.stringify(storage.state().user?.jwt || 'not logged in')); +} +window.setJWT = (jwt) => { + store.dispatch({ type: UPDATE_JWT, data: jwt }) +} + export default store; From 91e077d9b2409de8a3531ceb40ea40aac0bb0885 Mon Sep 17 00:00:00 2001 From: sylenien Date: Mon, 2 Jan 2023 14:51:48 +0100 Subject: [PATCH 12/19] fix(ui): fix duck type --- frontend/app/duck/user.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/app/duck/user.js b/frontend/app/duck/user.js index 8ef2bf5ab..36c7dd778 100644 --- a/frontend/app/duck/user.js +++ b/frontend/app/duck/user.js @@ -2,7 +2,6 @@ import { List, Map } from 'immutable'; import Client from 'Types/client'; import { deleteCookie } from 'App/utils'; import Account from 'Types/account'; -import { DELETE } from './jwt'; import withRequestState, { RequestTypes } from './requestStateCreator'; export const LOGIN = new RequestTypes('user/LOGIN'); @@ -38,6 +37,7 @@ const setClient = (state, data) => { } export const 
UPDATE_JWT = 'jwt/UPDATE'; +export const DELETE = 'jwt/DELETE' export function setJwt(data) { return { type: UPDATE_JWT, From b4181fb9a9ae9a217d6c9b827ec07ba10405324c Mon Sep 17 00:00:00 2001 From: Samyek Date: Sun, 1 Jan 2023 06:29:45 +0000 Subject: [PATCH 13/19] Fixed year update in alertNotifi inside footer --- .../utils/html/alert_notification.html | 106 ++++++++++-------- 1 file changed, 59 insertions(+), 47 deletions(-) diff --git a/api/chalicelib/utils/html/alert_notification.html b/api/chalicelib/utils/html/alert_notification.html index 2d63341f3..0f475dae1 100644 --- a/api/chalicelib/utils/html/alert_notification.html +++ b/api/chalicelib/utils/html/alert_notification.html @@ -1,60 +1,72 @@ - - - - + + + +
-
- - - - - - + +
-
- OpenReplay -
-
+ + + - - - -
+
+ + + + + - - - - - - - - + - -
+
+ OpenReplay +
+
-

- New alert!

-

- %(message)s

-

- See metrics for more details.

+ - -
-
-
-

- Sent with ♡ from OpenReplay © 2022 - All rights reserved.

- https://openreplay.com/ +

+

+ New alert!

+

+ %(message)s

+

+ See metrics for more details.

- -
+
+ + +
+
+
+
+

+ Sent with ♡ from OpenReplay © + - All rights + reserved.

+ https://openreplay.com/ +

+ +
+
+ + + + + - + \ No newline at end of file From 361010448aca32f74417820ecfccf5375ff6f481 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 3 Jan 2023 12:17:48 +0100 Subject: [PATCH 14/19] chore(build): Adding container signing support Signed-off-by: rjshrjndrn --- api/build.sh | 3 +++ backend/build.sh | 3 +++ ee/api/build_crons.sh | 3 +++ frontend/build.sh | 5 ++++- peers/build.sh | 3 +++ scripts/helmcharts/build_deploy.sh | 18 ++++++++++++------ sourcemap-reader/build.sh | 3 +++ utilities/build.sh | 3 +++ 8 files changed, 34 insertions(+), 7 deletions(-) diff --git a/api/build.sh b/api/build.sh index c5610e233..0e82088fe 100644 --- a/api/build.sh +++ b/api/build.sh @@ -50,6 +50,9 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/chalice:${image_tag} ${DOCKER_REPO:-'local'}/chalice:${tag}latest docker push ${DOCKER_REPO:-'local'}/chalice:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/chalice:${image_tag} + } echo "api docker build completed" } diff --git a/backend/build.sh b/backend/build.sh index 6ff4cd4ef..95a833139 100755 --- a/backend/build.sh +++ b/backend/build.sh @@ -27,6 +27,9 @@ function build_service() { [[ $PUSH_IMAGE -eq 1 ]] && { docker push ${DOCKER_REPO:-'local'}/$image:${image_tag} } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image:${image_tag} + } echo "Build completed for $image" return } diff --git a/ee/api/build_crons.sh b/ee/api/build_crons.sh index 2f9d84a81..4d246265a 100644 --- a/ee/api/build_crons.sh +++ b/ee/api/build_crons.sh @@ -36,6 +36,9 @@ function build_crons(){ docker tag ${DOCKER_REPO:-'local'}/crons:${git_sha1} ${DOCKER_REPO:-'local'}/crons:${tag}latest docker push ${DOCKER_REPO:-'local'}/crons:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/crons:${image_tag} + } echo "completed crons build" } diff --git a/frontend/build.sh b/frontend/build.sh index 33b726c1a..55d295746 
100644 --- a/frontend/build.sh +++ b/frontend/build.sh @@ -26,7 +26,10 @@ function build(){ [[ $PUSH_IMAGE -eq 1 ]] && { docker push ${DOCKER_REPO:-'local'}/frontend:${image_tag} } - echo "frotend build completed" + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/frontend:${image_tag} + } + echo "frontend build completed" } check_prereq diff --git a/peers/build.sh b/peers/build.sh index 25aa0d7ed..45cc97892 100644 --- a/peers/build.sh +++ b/peers/build.sh @@ -35,6 +35,9 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/peers:${image_tag} ${DOCKER_REPO:-'local'}/peers:latest docker push ${DOCKER_REPO:-'local'}/peers:latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/peers:${image_tag} + } echo "peer docker build complted" } diff --git a/scripts/helmcharts/build_deploy.sh b/scripts/helmcharts/build_deploy.sh index e5714a1b2..cc5acd020 100644 --- a/scripts/helmcharts/build_deploy.sh +++ b/scripts/helmcharts/build_deploy.sh @@ -8,6 +8,12 @@ set -e # Removing local alpine:latest image docker rmi alpine || true +# Signing image +# cosign sign --key awskms:///alias/openreplay-container-sign image_url:tag +export SIGN_IMAGE=1 +export PUSH_IMAGE=1 +export SIGN_KEY="awskms:///alias/openreplay-container-sign" + echo $DOCKER_REPO [[ -z DOCKER_REPO ]] && { echo Set DOCKER_REPO="your docker registry" @@ -15,15 +21,15 @@ echo $DOCKER_REPO } || { docker login $DOCKER_REPO cd ../../backend - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../utilities - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../peers - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../frontend - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../sourcemap-reader - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ cd ../api - PUSH_IMAGE=1 bash build.sh $@ + bash build.sh $@ } diff --git a/sourcemap-reader/build.sh b/sourcemap-reader/build.sh index 33d318cab..859347fd4 100644 --- 
a/sourcemap-reader/build.sh +++ b/sourcemap-reader/build.sh @@ -46,6 +46,9 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/${image_name}:${image_tag} ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest docker push ${DOCKER_REPO:-'local'}/${image_name}:${tag}latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/$image_name:${image_tag} + } echo "${image_name} docker build completed" } diff --git a/utilities/build.sh b/utilities/build.sh index d1042375b..87ff7f3e6 100644 --- a/utilities/build.sh +++ b/utilities/build.sh @@ -36,6 +36,9 @@ function build_api(){ docker tag ${DOCKER_REPO:-'local'}/assist:${image_tag} ${DOCKER_REPO:-'local'}/assist:latest docker push ${DOCKER_REPO:-'local'}/assist:latest } + [[ $SIGN_IMAGE -eq 1 ]] && { + cosign sign --key $SIGN_KEY ${DOCKER_REPO:-'local'}/assist:${image_tag} + } echo "build completed for assist" } From af589aca868631f572d02ad2a415f9a63c5c8bd5 Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Tue, 3 Jan 2023 12:50:11 +0100 Subject: [PATCH 15/19] chore(helm): kyverno update container signature key Signed-off-by: rjshrjndrn --- scripts/helmcharts/toolings/templates/kyverno.yaml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/scripts/helmcharts/toolings/templates/kyverno.yaml b/scripts/helmcharts/toolings/templates/kyverno.yaml index 9db721726..ee3a9691c 100644 --- a/scripts/helmcharts/toolings/templates/kyverno.yaml +++ b/scripts/helmcharts/toolings/templates/kyverno.yaml @@ -24,6 +24,16 @@ spec: - keys: publicKeys: |- -----BEGIN PUBLIC KEY----- - MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuSUrc90YHUpXwB2E7Hu080K6z+Yc - esqGVAEESg9lEjQUaxOUqRkW3nI/vXRQayLEfBs6ugPNqCH+DbuarI9Jkg== + MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAoLidzRiNIO3l/sWCYw2f + Ct71YSj7UVerhbR81TNEKYtW0fUqg4GagS+esprcXteHPoBI+ZcfL2xJIs0ZNHZs + A+2VXYrsFRgREtABFCwJ2G51ybusoS3jpBsAmSNjG0uzseDxQMTh0arNOlNbhbmI + Tj1ty2JfyLejDKlxavXheKmJGb+7IdDCMmP3f5mXSsJpsOM8SJo49BkvKhTwzjc0 + 
01dsSLo5mk9jeG2C6UvPCQeMIUKaf5GlYWyFx7vLZ+z5be9TPuWDH4GO0RtxJVXt + tqmk32aKe+0KDLH0ak9WRVz3ugYEjs+tqdO3y3ALLoGAAI+yGxGSfWFDnDj5AXpA + 2/XYSJAWRzPu35/H3laSrxaApYWN5an69jI30JY7SoEy/k+10oIGe2FGIihXTdq+ + As3IKPEtvuN9s3RTm2ujV/7rEnVVKWiHvQCwH8rxhsbDTeJCoNs8hSBUq1Muttct + EWML8s/TCIK01PyvH6VNQSnc+lRKAJOd5NpZ/SVMXBbrykCQSZPE8RcaQum3nMxE + Tri24VcWfRHj1WwUYzxpmoVE5F1lw0lqQIXlwz+AFhCLGsePSkjFShFtNFQuX22r + Q73JTt3FX4JEzaaKC5BZwXmkEs3MVpQj43HuEqDyejlsPWwRBYwZIzXpoBhOCFHD + t4PI8n+1dSE+uavu/ijgXl8CAwEAAQ== -----END PUBLIC KEY----- From b0c74bb327df183f854d7fd266abfacb5bffbbd5 Mon Sep 17 00:00:00 2001 From: Alex Kaminskii Date: Wed, 4 Jan 2023 14:09:39 +0100 Subject: [PATCH 16/19] refactor(player):ISP on StyleManager setLoader --- frontend/app/player/web/MessageManager.ts | 10 ++++------ frontend/app/player/web/WebLivePlayer.ts | 2 +- frontend/app/player/web/managers/DOM/DOMManager.ts | 4 ++-- frontend/app/player/web/managers/DOM/StylesManager.ts | 6 +++--- frontend/app/player/web/managers/PagesManager.ts | 8 ++++++-- 5 files changed, 16 insertions(+), 14 deletions(-) diff --git a/frontend/app/player/web/MessageManager.ts b/frontend/app/player/web/MessageManager.ts index 0c52350d4..b2410f6cd 100644 --- a/frontend/app/player/web/MessageManager.ts +++ b/frontend/app/player/web/MessageManager.ts @@ -123,7 +123,10 @@ export default class MessageManager { private readonly screen: Screen, initialLists?: Partial ) { - this.pagesManager = new PagesManager(screen, this.session.isMobile, this) + this.pagesManager = new PagesManager(screen, this.session.isMobile, cssLoading => { + screen.displayFrame(!cssLoading) + state.update({ cssLoading }) + }) this.mouseMoveManager = new MouseMoveManager(screen) this.sessionStart = this.session.startedAt @@ -482,11 +485,6 @@ export default class MessageManager { this.state.update({ messagesLoading }); } - setCSSLoading(cssLoading: boolean) { - this.screen.displayFrame(!cssLoading); - this.state.update({ cssLoading }); - } - private setSize({ 
height, width }: { height: number, width: number }) { this.screen.scale({ height, width }); this.state.update({ width, height }); diff --git a/frontend/app/player/web/WebLivePlayer.ts b/frontend/app/player/web/WebLivePlayer.ts index 87e88225a..81f1c2178 100644 --- a/frontend/app/player/web/WebLivePlayer.ts +++ b/frontend/app/player/web/WebLivePlayer.ts @@ -28,7 +28,7 @@ export default class WebLivePlayer extends WebPlayer { this.assistManager = new AssistManager( session, - f => this.messageManager.setCSSLoading(f), + f => this.messageManager.setMessagesLoading(f), (msg, idx) => { this.incomingMessages.push(msg) if (!this.historyFileIsLoading) { diff --git a/frontend/app/player/web/managers/DOM/DOMManager.ts b/frontend/app/player/web/managers/DOM/DOMManager.ts index d0005352c..e47a803fd 100644 --- a/frontend/app/player/web/managers/DOM/DOMManager.ts +++ b/frontend/app/player/web/managers/DOM/DOMManager.ts @@ -56,10 +56,10 @@ export default class DOMManager extends ListWalker { private readonly screen: Screen, private readonly isMobile: boolean, public readonly time: number, - mm: MessageManager, + setCssLoading: ConstructorParameters[1], ) { super() - this.stylesManager = new StylesManager(screen, mm) + this.stylesManager = new StylesManager(screen, setCssLoading) } append(m: Message): void { diff --git a/frontend/app/player/web/managers/DOM/StylesManager.ts b/frontend/app/player/web/managers/DOM/StylesManager.ts index 5cffa9be9..c38ea0281 100644 --- a/frontend/app/player/web/managers/DOM/StylesManager.ts +++ b/frontend/app/player/web/managers/DOM/StylesManager.ts @@ -24,7 +24,7 @@ export default class StylesManager { private linkLoadPromises: Array> = []; private skipCSSLinks: Array = []; // should be common for all pages - constructor(private readonly screen: Screen, private readonly mm: MessageManager) {} + constructor(private readonly screen: Screen, private readonly setLoading: (flag: boolean) => void) {} reset():void { this.linkLoadingCount = 0; @@ -38,7 
+38,7 @@ export default class StylesManager { const promise = new Promise((resolve) => { if (this.skipCSSLinks.includes(value)) resolve(); this.linkLoadingCount++; - this.mm.setCSSLoading(true); + this.setLoading(true); const addSkipAndResolve = () => { this.skipCSSLinks.push(value); // watch out resolve() @@ -57,7 +57,7 @@ export default class StylesManager { clearTimeout(timeoutId); this.linkLoadingCount--; if (this.linkLoadingCount === 0) { - this.mm.setCSSLoading(false); + this.setLoading(false); } }); this.linkLoadPromises.push(promise); diff --git a/frontend/app/player/web/managers/PagesManager.ts b/frontend/app/player/web/managers/PagesManager.ts index e4248f2ab..35d47a670 100644 --- a/frontend/app/player/web/managers/PagesManager.ts +++ b/frontend/app/player/web/managers/PagesManager.ts @@ -10,14 +10,18 @@ import DOMManager from './DOM/DOMManager'; export default class PagesManager extends ListWalker { private currentPage: DOMManager | null = null - constructor(private screen: Screen, private isMobile: boolean, private mm: MessageManager) { super() } + constructor( + private screen: Screen, + private isMobile: boolean, + private setCssLoading: ConstructorParameters[3], +) { super() } /* Assumed that messages added in a correct time sequence. */ appendMessage(m: Message): void { if (m.tp === MType.CreateDocument) { - super.append(new DOMManager(this.screen, this.isMobile, m.time, this.mm)) + super.append(new DOMManager(this.screen, this.isMobile, m.time, this.setCssLoading)) } if (this.last === null) { // Log wrong From 6f327e61092a9affe28464bdc909d01d214e4a9e Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Wed, 4 Jan 2023 23:52:23 +0100 Subject: [PATCH 17/19] chore(helm): Adding tls enabled to redis Without this flag, if the helm version is older, then we'll get template error. 
Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay/values.yaml | 74 ++--------------------- 1 file changed, 4 insertions(+), 70 deletions(-) diff --git a/scripts/helmcharts/openreplay/values.yaml b/scripts/helmcharts/openreplay/values.yaml index 33cc3e178..67cf00405 100644 --- a/scripts/helmcharts/openreplay/values.yaml +++ b/scripts/helmcharts/openreplay/values.yaml @@ -1,73 +1,6 @@ -# Default values for openreplay. -# This is a YAML-formatted file. -# Declare variables to be passed into your templates. - -replicaCount: 1 - -image: - repository: nginx - pullPolicy: IfNotPresent - # Overrides the image tag whose default is the chart appVersion. - tag: "" - -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" - -serviceAccount: - # Specifies whether a service account should be created - create: true - # Annotations to add to the service account - annotations: {} - # The name of the service account to use. - # If not set and create is true, a name is generated using the fullname template - name: "" - -podAnnotations: {} - -securityContext: - runAsUser: 1001 - runAsGroup: 1001 -podSecurityContext: - runAsUser: 1001 - runAsGroup: 1001 - fsGroup: 1001 - fsGroupChangePolicy: "OnRootMismatch" -# podSecurityContext: {} - # fsGroup: 2000 - -# securityContext: {} - # capabilities: - # drop: - # - ALL - # readOnlyRootFilesystem: true - # runAsNonRoot: true - # runAsUser: 1000 - -resources: {} - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. 
- # limits: - # cpu: 100m - # memory: 128Mi - # requests: - # cpu: 100m - # memory: 128Mi - -autoscaling: - enabled: false - minReplicas: 1 - maxReplicas: 100 - targetCPUUtilizationPercentage: 80 - # targetMemoryUtilizationPercentage: 80 - -nodeSelector: {} - -tolerations: [] - -affinity: {} +redis: &redis + tls: + enabled: false ingress-nginx: enabled: true @@ -103,4 +36,5 @@ vault: &vault global: vault: *vault + redis: *redis clusterDomain: "svc.cluster.local" From de23c3fccbe2908f321354a5130e04d879034feb Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 5 Jan 2023 00:38:17 +0100 Subject: [PATCH 18/19] chore(cli): Adding debug and skip migration flags Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay-cli | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index d4b36a663..e4fad5a5c 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -47,7 +47,7 @@ cat <<"EOF" EOF - echo -e "${green}Usage: openreplay-cli [ -h | --help ] + echo -e "${green}Usage: [DEBUG=1|SKIP_MIGRATION=1] openreplay-cli [ -h | --help ] [ -d | --status ] [ -v | --verbose ] [ -l | --logs SERVICE ] @@ -96,11 +96,10 @@ restart() { helmInstall() { - [[ FORCE_UPGRADE_FRONTENT -eq 1 ]] && { - helm upgrade --install openreplay -n app openreplay -f vars.yaml --set forceUpgradeFrontend=true - } || { - helm upgrade --install openreplay -n app openreplay -f vars.yaml - } + # Adding variables + [[ $SKIP_MIGRATION -eq 1 ]] && ARGS="--set skipMigration=true" + [[ $DEBUG -eq 1 ]] && ARGS="$ARGS --debug" + helm upgrade --install openreplay -n app openreplay -f vars.yaml $ARGS } upgrade() { From 5ae64e4e2ab904af1295b897f4cc6e4a5153f24f Mon Sep 17 00:00:00 2001 From: rjshrjndrn Date: Thu, 5 Jan 2023 01:00:20 +0100 Subject: [PATCH 19/19] chore(cli): placeholder error handler Signed-off-by: rjshrjndrn --- scripts/helmcharts/openreplay-cli | 9 ++------- 1 file changed, 2 
insertions(+), 7 deletions(-) diff --git a/scripts/helmcharts/openreplay-cli b/scripts/helmcharts/openreplay-cli index e4fad5a5c..903f9d312 100755 --- a/scripts/helmcharts/openreplay-cli +++ b/scripts/helmcharts/openreplay-cli @@ -7,12 +7,7 @@ set -eE -o pipefail # same as: `set -o errexit -o errtrace` trap err EXIT err() { - case "$?" in - 0) - ;; - *) - ;; - esac + exit $1 } # make all stderr red @@ -144,4 +139,4 @@ do esac done -[[ $VERBOSE -eq 1 ]] && set -x +[[ $VERBOSE -eq 1 ]] && set -x || true