Merge branch 'develop' into DEVSVCS-547-automation-tests-test-config-contract-address

anirudhwarrier authored Sep 20, 2024
2 parents fd9e98a + 7b324ca commit 562cbe4
Showing 71 changed files with 2,677 additions and 1,738 deletions.
5 changes: 5 additions & 0 deletions .changeset/sixty-cougars-mix.md
@@ -0,0 +1,5 @@
---
"chainlink": patch
---

Use tx in insertLogsWithinTx #internal
5 changes: 5 additions & 0 deletions .changeset/sour-ears-wink.md
@@ -0,0 +1,5 @@
---
"chainlink": patch
---

#internal KMS client for deployment
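The changeset only names an internal KMS client for deployment tooling; none of its code appears in the files shown on this page. As a loose illustration of what signing through AWS KMS looks like from Go — the key alias, digest, and helper name below are hypothetical and not taken from this PR — a sketch assuming the aws-sdk-go-v2 KMS client:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/kms"
	kmstypes "github.com/aws/aws-sdk-go-v2/service/kms/types"
)

// signDigest asks KMS to sign a precomputed 32-byte digest with an ECDSA key.
// Key ID and algorithm are placeholders, not values taken from this commit.
func signDigest(ctx context.Context, client *kms.Client, keyID string, digest []byte) ([]byte, error) {
	out, err := client.Sign(ctx, &kms.SignInput{
		KeyId:            aws.String(keyID),
		Message:          digest,
		MessageType:      kmstypes.MessageTypeDigest,
		SigningAlgorithm: kmstypes.SigningAlgorithmSpecEcdsaSha256,
	})
	if err != nil {
		return nil, err
	}
	return out.Signature, nil
}

func main() {
	ctx := context.Background()
	cfg, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	client := kms.NewFromConfig(cfg)
	sig, err := signDigest(ctx, client, "alias/deployer-key", make([]byte, 32))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("signature: %x\n", sig)
}
```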
6 changes: 2 additions & 4 deletions .github/workflows/run-e2e-tests-reusable-workflow.yml
@@ -572,8 +572,7 @@ jobs:
E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push
E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
E2E_TEST_GRAFANA_BASE_URL: "http://localhost:8080/primary"
E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL || '/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs' }}
E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }}
E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
E2E_TEST_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }}
E2E_TEST_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }}
@@ -788,8 +787,7 @@ jobs:
E2E_TEST_LOKI_TENANT_ID: ${{ secrets.GRAFANA_INTERNAL_TENANT_ID }}
E2E_TEST_LOKI_ENDPOINT: https://${{ secrets.GRAFANA_INTERNAL_HOST }}/loki/api/v1/push
E2E_TEST_LOKI_BASIC_AUTH: ${{ secrets.GRAFANA_INTERNAL_BASIC_AUTH }}
E2E_TEST_GRAFANA_BASE_URL: "http://localhost:8080/primary"
E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL || '/d/ddf75041-1e39-42af-aa46-361fe4c36e9e/ci-e2e-tests-logs' }}
E2E_TEST_GRAFANA_DASHBOARD_URL: ${{ matrix.tests.test_env_vars.E2E_TEST_GRAFANA_DASHBOARD_URL }}
E2E_TEST_GRAFANA_BEARER_TOKEN: ${{ secrets.GRAFANA_INTERNAL_URL_SHORTENER_TOKEN }}
E2E_TEST_PYROSCOPE_ENVIRONMENT: ${{ matrix.tests.pyroscope_env }}
E2E_TEST_PYROSCOPE_SERVER_URL: ${{ matrix.tests.pyroscope_env != '' && secrets.QA_PYROSCOPE_INSTANCE || '' }}
2 changes: 0 additions & 2 deletions core/capabilities/ccip/ccip_integration_tests/helpers.go
@@ -406,7 +406,6 @@ func (h *homeChain) AddNodes(
p2pIDs [][32]byte,
capabilityIDs [][32]byte,
) {
// Need to sort, otherwise _checkIsValidUniqueSubset onChain will fail
sortP2PIDS(p2pIDs)
var nodeParams []kcr.CapabilitiesRegistryNodeParams
for _, p2pID := range p2pIDs {
@@ -430,7 +429,6 @@ func AddChainConfig(
p2pIDs [][32]byte,
f uint8,
) ccip_config.CCIPConfigTypesChainConfigInfo {
// Need to sort, otherwise _checkIsValidUniqueSubset onChain will fail
sortP2PIDS(p2pIDs)
// First Add ChainConfig that includes all p2pIDs as readers
encodedExtraChainConfig, err := chainconfig.EncodeChainConfig(chainconfig.ChainConfig{
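The removed comments explained why sortP2PIDS runs before building the node and chain-config params: the on-chain _checkIsValidUniqueSubset check rejects unsorted ID sets. sortP2PIDS itself is not shown in this diff; a minimal sketch of how such a helper could order [][32]byte peer IDs lexicographically (an illustration under that assumption, not the repository's implementation):

```go
package main

import (
	"bytes"
	"fmt"
	"sort"
)

// sortP2PIDs orders 32-byte peer IDs lexicographically so an on-chain
// sorted-unique-subset check sees a sorted input.
// Hypothetical helper for illustration; the repository's sortP2PIDS may differ.
func sortP2PIDs(ids [][32]byte) {
	sort.Slice(ids, func(i, j int) bool {
		return bytes.Compare(ids[i][:], ids[j][:]) < 0
	})
}

func main() {
	ids := [][32]byte{{0x02}, {0x01}, {0x03}}
	sortP2PIDs(ids)
	for _, id := range ids {
		fmt.Printf("%x\n", id[:1]) // prints 01, 02, 03
	}
}
```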
32 changes: 18 additions & 14 deletions core/chains/evm/logpoller/log_poller_internal_test.go
@@ -7,6 +7,7 @@ import (
"math/big"
"strings"
"sync"
"sync/atomic"
"testing"
"time"

@@ -287,12 +288,14 @@ func TestLogPoller_Replay(t *testing.T) {
db := pgtest.NewSqlxDB(t)
orm := NewORM(chainID, db, lggr)

head := evmtypes.Head{Number: 4}
var head atomic.Pointer[evmtypes.Head]
head.Store(&evmtypes.Head{Number: 4})

events := []common.Hash{EmitterABI.Events["Log1"].ID}
log1 := types.Log{
Index: 0,
BlockHash: common.Hash{},
BlockNumber: uint64(head.Number),
BlockNumber: uint64(head.Load().Number),
Topics: events,
Address: addr,
TxHash: common.HexToHash("0x1234"),
@@ -301,8 +304,7 @@

ec := evmclimocks.NewClient(t)
ec.On("HeadByNumber", mock.Anything, mock.Anything).Return(func(context.Context, *big.Int) (*evmtypes.Head, error) {
headCopy := head
return &headCopy, nil
return head.Load(), nil
})
ec.On("FilterLogs", mock.Anything, mock.Anything).Return([]types.Log{log1}, nil).Once()
ec.On("ConfiguredChainID").Return(chainID, nil)
@@ -318,9 +320,9 @@
headTracker := htMocks.NewHeadTracker[*evmtypes.Head, common.Hash](t)

headTracker.On("LatestAndFinalizedBlock", mock.Anything).Return(func(ctx context.Context) (*evmtypes.Head, *evmtypes.Head, error) {
headCopy := head
finalized := &evmtypes.Head{Number: headCopy.Number - lpOpts.FinalityDepth}
return &headCopy, finalized, nil
h := head.Load()
finalized := &evmtypes.Head{Number: h.Number - lpOpts.FinalityDepth}
return h, finalized, nil
})
lp := NewLogPoller(orm, ec, lggr, headTracker, lpOpts)

@@ -394,7 +396,7 @@ func TestLogPoller_Replay(t *testing.T) {
var wg sync.WaitGroup
defer func() { wg.Wait() }()
ec.On("FilterLogs", mock.Anything, mock.Anything).Once().Return([]types.Log{log1}, nil).Run(func(args mock.Arguments) {
head = evmtypes.Head{Number: 4}
head.Store(&evmtypes.Head{Number: 4})
wg.Add(1)
go func() {
defer wg.Done()
@@ -421,7 +423,7 @@

ec.On("FilterLogs", mock.Anything, mock.Anything).Return([]types.Log{log1}, nil).Maybe() // in case task gets delayed by >= 100ms

head = evmtypes.Head{Number: 5}
head.Store(&evmtypes.Head{Number: 5})
t.Cleanup(lp.reset)
servicetest.Run(t, lp)

@@ -448,7 +450,7 @@
go func() {
defer close(done)

head = evmtypes.Head{Number: 4} // Restore latest block to 4, so this matches the fromBlock requested
head.Store(&evmtypes.Head{Number: 4}) // Restore latest block to 4, so this matches the fromBlock requested
select {
case lp.replayStart <- 4:
case <-ctx.Done():
@@ -469,7 +471,7 @@
ec.On("FilterLogs", mock.Anything, mock.Anything).Return([]types.Log{log1}, nil)

t.Cleanup(lp.reset)
head = evmtypes.Head{Number: 5} // Latest block must be > lastProcessed in order for SaveAndPollLogs() to call FilterLogs()
head.Store(&evmtypes.Head{Number: 5}) // Latest block must be > lastProcessed in order for SaveAndPollLogs() to call FilterLogs()
servicetest.Run(t, lp)

select {
@@ -482,7 +484,8 @@
// ReplayAsync should return as soon as replayStart is received
t.Run("ReplayAsync success", func(t *testing.T) {
t.Cleanup(lp.reset)
head = evmtypes.Head{Number: 5}

head.Store(&evmtypes.Head{Number: 5})
ec.On("FilterLogs", mock.Anything, mock.Anything).Return([]types.Log{log1}, nil)
mockBatchCallContext(t, ec)
servicetest.Run(t, lp)
@@ -496,7 +499,7 @@
ctx := testutils.Context(t)
t.Cleanup(lp.reset)
servicetest.Run(t, lp)
head = evmtypes.Head{Number: 4}
head.Store(&evmtypes.Head{Number: 4})

anyErr := pkgerrors.New("async error")
observedLogs.TakeAll()
@@ -528,7 +531,8 @@
err := lp.orm.DeleteLogsAndBlocksAfter(ctx, 0)
require.NoError(t, err)

err = lp.orm.InsertBlock(ctx, head.Hash, head.Number, head.Timestamp, head.Number)
h := head.Load()
err = lp.orm.InsertBlock(ctx, h.Hash, h.Number, h.Timestamp, h.Number)
require.NoError(t, err)

ec.On("FilterLogs", mock.Anything, mock.Anything).Return([]types.Log{log1}, nil)
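The test changes above replace a plain evmtypes.Head value shared across goroutines with an atomic.Pointer[evmtypes.Head], so the mock callbacks that read the head and the subtests that advance it no longer race. A self-contained sketch of the same pattern with a simplified Head type (not the Chainlink code itself):

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// Head is a stand-in for evmtypes.Head.
type Head struct{ Number int64 }

func main() {
	var head atomic.Pointer[Head]
	head.Store(&Head{Number: 4})

	var wg sync.WaitGroup
	// Readers load a consistent snapshot instead of copying a shared struct.
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			_ = head.Load().Number // safe concurrent read
		}()
	}
	// The writer publishes a new head atomically, as the test does with
	// head.Store(&evmtypes.Head{Number: 5}).
	head.Store(&Head{Number: 5})
	wg.Wait()
	fmt.Println("latest:", head.Load().Number)
}
```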
2 changes: 1 addition & 1 deletion core/chains/evm/logpoller/orm.go
@@ -390,7 +390,7 @@ func (o *DSORM) insertLogsWithinTx(ctx context.Context, logs []Log, tx sqlutil.D
(:evm_chain_id, :log_index, :block_hash, :block_number, :block_timestamp, :address, :event_sig, :topics, :tx_hash, :data, NOW())
ON CONFLICT DO NOTHING`

_, err := o.ds.NamedExecContext(ctx, query, logs[start:end])
_, err := tx.NamedExecContext(ctx, query, logs[start:end])
if err != nil {
if pkgerrors.Is(err, context.DeadlineExceeded) && batchInsertSize > 500 {
// In case of DB timeouts, try to insert again with a smaller batch upto a limit
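This is the one-line fix behind the "Use tx in insertLogsWithinTx" changeset: the batch insert now runs on the tx handle instead of the outer o.ds datasource, so the writes stay inside the transaction and roll back with it. A minimal sketch of the same pattern using sqlx directly (a plain *sqlx.Tx and an illustrative table with made-up columns stand in for the repository's sqlutil types):

```go
package main

import (
	"context"
	"log"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq"
)

type logRow struct {
	BlockNumber int64  `db:"block_number"`
	Data        []byte `db:"data"`
}

// insertLogsWithinTx inserts rows using the transaction handle, so a later
// tx.Rollback() also discards these writes. Table and columns are illustrative.
func insertLogsWithinTx(ctx context.Context, tx *sqlx.Tx, logs []logRow) error {
	const query = `INSERT INTO evm.logs (block_number, data)
	               VALUES (:block_number, :data) ON CONFLICT DO NOTHING`
	_, err := tx.NamedExecContext(ctx, query, logs) // not db.NamedExecContext
	return err
}

func main() {
	ctx := context.Background()
	db, err := sqlx.Open("postgres", "postgres://localhost/chainlink_test?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	tx, err := db.BeginTxx(ctx, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback()
	if err := insertLogsWithinTx(ctx, tx, []logRow{{BlockNumber: 1, Data: []byte{0x01}}}); err != nil {
		log.Fatal(err)
	}
	if err := tx.Commit(); err != nil {
		log.Fatal(err)
	}
}
```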
2 changes: 1 addition & 1 deletion core/config/toml/types.go
@@ -1695,7 +1695,7 @@ func (b *Telemetry) ValidateConfig() (err error) {
}
if b.InsecureConnection != nil && *b.InsecureConnection {
if build.IsProd() {
err = multierr.Append(err, configutils.ErrInvalid{Name: "InsecureConnection", Msg: "cannot be used in production builds"})
err = multierr.Append(err, configutils.ErrInvalid{Name: "InsecureConnection", Value: true, Msg: "cannot be used in production builds"})
}
} else {
if b.CACertFile == nil || *b.CACertFile == "" {
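The Telemetry validation change adds the offending Value to the ErrInvalid it returns, so the resulting error can report which setting failed, with what value, and why. A rough sketch of that shape of validation error, using the standard library's errors.Join in place of multierr (the struct here only mirrors the Name/Value/Msg fields visible in the diff; the real configutils.ErrInvalid may format differently):

```go
package main

import (
	"errors"
	"fmt"
)

// ErrInvalid mirrors the Name/Value/Msg shape used in the diff above;
// the real type lives in Chainlink's config utilities and may differ.
type ErrInvalid struct {
	Name  string
	Value any
	Msg   string
}

func (e ErrInvalid) Error() string {
	return fmt.Sprintf("%s: invalid value (%v): %s", e.Name, e.Value, e.Msg)
}

// validateTelemetry rejects insecure telemetry connections in production builds.
func validateTelemetry(insecure, prodBuild bool) error {
	var err error
	if insecure && prodBuild {
		err = errors.Join(err, ErrInvalid{
			Name:  "InsecureConnection",
			Value: true,
			Msg:   "cannot be used in production builds",
		})
	}
	return err
}

func main() {
	fmt.Println(validateTelemetry(true, true))
}
```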