Skip to content

Commit 82ca835

Browse files
fix: use slices.Concat instead of sometimes modifying r.Options
1 parent f6381a8 commit 82ca835

File tree

10 files changed

+26
-16
lines changed

10 files changed

+26
-16
lines changed

client.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import (
66
"context"
77
"net/http"
88
"os"
9+
"slices"
910

1011
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
1112
"github.com/ScrapeGraphAI/scrapegraphai-go/option"
@@ -94,7 +95,7 @@ func NewClient(opts ...option.RequestOption) (r Client) {
9495
// For even greater flexibility, see [option.WithResponseInto] and
9596
// [option.WithResponseBodyInto].
9697
func (r *Client) Execute(ctx context.Context, method string, path string, params any, res any, opts ...option.RequestOption) error {
97-
opts = append(r.Options, opts...)
98+
opts = slices.Concat(r.Options, opts)
9899
return requestconfig.ExecuteNewRequest(ctx, method, path, params, res, opts...)
99100
}
100101

crawl.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import (
77
"errors"
88
"fmt"
99
"net/http"
10+
"slices"
1011

1112
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1213
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -36,7 +37,7 @@ func NewCrawlService(opts ...option.RequestOption) (r CrawlService) {
3637

3738
// Retrieve the status and results of a crawling job
3839
func (r *CrawlService) GetResults(ctx context.Context, taskID string, opts ...option.RequestOption) (res *CrawlGetResultsResponse, err error) {
39-
opts = append(r.Options[:], opts...)
40+
opts = slices.Concat(r.Options, opts)
4041
if taskID == "" {
4142
err = errors.New("missing required task_id parameter")
4243
return
@@ -50,7 +51,7 @@ func (r *CrawlService) GetResults(ctx context.Context, taskID string, opts ...op
5051
// extraction mode and markdown conversion mode. Returns a task ID for async
5152
// processing.
5253
func (r *CrawlService) Start(ctx context.Context, body CrawlStartParams, opts ...option.RequestOption) (res *CrawlStartResponse, err error) {
53-
opts = append(r.Options[:], opts...)
54+
opts = slices.Concat(r.Options, opts)
5455
path := "crawl"
5556
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
5657
return

credit.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ package scrapegraphai
55
import (
66
"context"
77
"net/http"
8+
"slices"
89

910
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1011
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -33,7 +34,7 @@ func NewCreditService(opts ...option.RequestOption) (r CreditService) {
3334

3435
// Retrieve the current credit balance and usage for the authenticated user
3536
func (r *CreditService) Get(ctx context.Context, opts ...option.RequestOption) (res *CreditGetResponse, err error) {
36-
opts = append(r.Options[:], opts...)
37+
opts = slices.Concat(r.Options, opts)
3738
path := "credits"
3839
err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
3940
return

feedback.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ package scrapegraphai
55
import (
66
"context"
77
"net/http"
8+
"slices"
89
"time"
910

1011
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
@@ -35,7 +36,7 @@ func NewFeedbackService(opts ...option.RequestOption) (r FeedbackService) {
3536

3637
// Submit feedback for a specific request
3738
func (r *FeedbackService) Submit(ctx context.Context, body FeedbackSubmitParams, opts ...option.RequestOption) (res *FeedbackSubmitResponse, err error) {
38-
opts = append(r.Options[:], opts...)
39+
opts = slices.Concat(r.Options, opts)
3940
path := "feedback"
4041
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
4142
return

generateschema.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import (
88
"errors"
99
"fmt"
1010
"net/http"
11+
"slices"
1112

1213
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1314
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -38,15 +39,15 @@ func NewGenerateSchemaService(opts ...option.RequestOption) (r GenerateSchemaSer
3839
// Generate or modify JSON schemas based on natural language descriptions. Can
3940
// create new schemas or extend existing ones.
4041
func (r *GenerateSchemaService) New(ctx context.Context, body GenerateSchemaNewParams, opts ...option.RequestOption) (res *GenerateSchemaNewResponse, err error) {
41-
opts = append(r.Options[:], opts...)
42+
opts = slices.Concat(r.Options, opts)
4243
path := "generate_schema"
4344
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
4445
return
4546
}
4647

4748
// Retrieve the status and results of a schema generation request
4849
func (r *GenerateSchemaService) Get(ctx context.Context, requestID string, opts ...option.RequestOption) (res *GenerateSchemaGetResponseUnion, err error) {
49-
opts = append(r.Options[:], opts...)
50+
opts = slices.Concat(r.Options, opts)
5051
if requestID == "" {
5152
err = errors.New("missing required request_id parameter")
5253
return

healthz.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ package scrapegraphai
55
import (
66
"context"
77
"net/http"
8+
"slices"
89

910
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1011
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -33,7 +34,7 @@ func NewHealthzService(opts ...option.RequestOption) (r HealthzService) {
3334

3435
// Check the health status of the service
3536
func (r *HealthzService) Check(ctx context.Context, opts ...option.RequestOption) (res *HealthzCheckResponse, err error) {
36-
opts = append(r.Options[:], opts...)
37+
opts = slices.Concat(r.Options, opts)
3738
path := "healthz"
3839
err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
3940
return

markdownify.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import (
88
"errors"
99
"fmt"
1010
"net/http"
11+
"slices"
1112

1213
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1314
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -37,15 +38,15 @@ func NewMarkdownifyService(opts ...option.RequestOption) (r MarkdownifyService)
3738

3839
// Convert web page content to clean Markdown format
3940
func (r *MarkdownifyService) Convert(ctx context.Context, body MarkdownifyConvertParams, opts ...option.RequestOption) (res *CompletedMarkdownify, err error) {
40-
opts = append(r.Options[:], opts...)
41+
opts = slices.Concat(r.Options, opts)
4142
path := "markdownify"
4243
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
4344
return
4445
}
4546

4647
// Retrieve the status and results of a markdown conversion
4748
func (r *MarkdownifyService) GetStatus(ctx context.Context, requestID string, opts ...option.RequestOption) (res *MarkdownifyGetStatusResponseUnion, err error) {
48-
opts = append(r.Options[:], opts...)
49+
opts = slices.Concat(r.Options, opts)
4950
if requestID == "" {
5051
err = errors.New("missing required request_id parameter")
5152
return

searchscraper.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import (
88
"errors"
99
"fmt"
1010
"net/http"
11+
"slices"
1112

1213
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1314
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -39,15 +40,15 @@ func NewSearchscraperService(opts ...option.RequestOption) (r SearchscraperServi
3940
// multiple websites. Uses LLM to refine search queries and merge results from
4041
// different sources.
4142
func (r *SearchscraperService) New(ctx context.Context, body SearchscraperNewParams, opts ...option.RequestOption) (res *CompletedSearchScraper, err error) {
42-
opts = append(r.Options[:], opts...)
43+
opts = slices.Concat(r.Options, opts)
4344
path := "searchscraper"
4445
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
4546
return
4647
}
4748

4849
// Retrieve the status and results of a search scraping operation
4950
func (r *SearchscraperService) GetStatus(ctx context.Context, requestID string, opts ...option.RequestOption) (res *SearchscraperGetStatusResponseUnion, err error) {
50-
opts = append(r.Options[:], opts...)
51+
opts = slices.Concat(r.Options, opts)
5152
if requestID == "" {
5253
err = errors.New("missing required request_id parameter")
5354
return

smartscraper.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import (
88
"errors"
99
"fmt"
1010
"net/http"
11+
"slices"
1112

1213
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1314
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -38,15 +39,15 @@ func NewSmartscraperService(opts ...option.RequestOption) (r SmartscraperService
3839
// Main scraping endpoint with LLM-powered content analysis. Supports various
3940
// fetching providers, infinite scrolling, pagination, and custom output schemas.
4041
func (r *SmartscraperService) New(ctx context.Context, body SmartscraperNewParams, opts ...option.RequestOption) (res *CompletedSmartscraper, err error) {
41-
opts = append(r.Options[:], opts...)
42+
opts = slices.Concat(r.Options, opts)
4243
path := "smartscraper"
4344
err = requestconfig.ExecuteNewRequest(ctx, http.MethodPost, path, body, &res, opts...)
4445
return
4546
}
4647

4748
// Retrieve the status and results of a scraping operation
4849
func (r *SmartscraperService) Get(ctx context.Context, requestID string, opts ...option.RequestOption) (res *SmartscraperGetResponseUnion, err error) {
49-
opts = append(r.Options[:], opts...)
50+
opts = slices.Concat(r.Options, opts)
5051
if requestID == "" {
5152
err = errors.New("missing required request_id parameter")
5253
return
@@ -58,7 +59,7 @@ func (r *SmartscraperService) Get(ctx context.Context, requestID string, opts ..
5859

5960
// Retrieve the status and results of a scraping operation
6061
func (r *SmartscraperService) List(ctx context.Context, opts ...option.RequestOption) (res *SmartscraperListResponseUnion, err error) {
61-
opts = append(r.Options[:], opts...)
62+
opts = slices.Concat(r.Options, opts)
6263
path := "smartscraper"
6364
err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
6465
return

validate.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ package scrapegraphai
55
import (
66
"context"
77
"net/http"
8+
"slices"
89

910
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/apijson"
1011
"github.com/ScrapeGraphAI/scrapegraphai-go/internal/requestconfig"
@@ -33,7 +34,7 @@ func NewValidateService(opts ...option.RequestOption) (r ValidateService) {
3334

3435
// Validate the API key and retrieve associated user email
3536
func (r *ValidateService) APIKey(ctx context.Context, opts ...option.RequestOption) (res *ValidateAPIKeyResponse, err error) {
36-
opts = append(r.Options[:], opts...)
37+
opts = slices.Concat(r.Options, opts)
3738
path := "validate"
3839
err = requestconfig.ExecuteNewRequest(ctx, http.MethodGet, path, nil, &res, opts...)
3940
return

0 commit comments

Comments (0)