-
Notifications
You must be signed in to change notification settings - Fork 3
Closed
Labels
bug — Something isn't working; chall-manager — Related to chall-manager
Description
What happened?
I launched an instance through CTFd and subsequently wanted to delete it through CTFd. The first try gave an error, the second try worked.
What you expected
The instance should have been deleted at the first attempt without showing an error message.
Scope
Pulumi deployment
Version
latest
Relevant log output
{"level":"info","ts":1763079187.2854004,"caller":"global/log.go:24","msg":"creating new instance","challenge_id":"17","identity":"ee8a80ff9795a47a","source_id":"1"}
{"level":"info","ts":1763079211.9897072,"caller":"global/log.go:24","msg":"instance created successfully","challenge_id":"17","identity":"ee8a80ff9795a47a","source_id":"1"}
{"level":"error","ts":1763079214.8899035,"caller":"global/log.go:28","msg":"creating challenge instance stack","error":"internal server error: invalid scenario: open /root/.cache/chall-manager/oci/sha256:5d644ce467188a67a451547f82be6c09ea708488cbda9dc5b020afd26e1134fa/Pulumi.yml: no such file or directory","challenge_id":"17","identity":"ee8a80ff9795a47a","source_id":"1","stacktrace":"github.com/ctfer-io/chall-manager/global.(*Logger).Error\n\t/go/src/global/log.go:28\ngithub.com/ctfer-io/chall-manager/api/v1/instance.(*Manager).DeleteInstance\n\t/go/src/api/v1/instance/delete.go:182\ngithub.com/ctfer-io/chall-manager/api/v1/instance._InstanceManager_DeleteInstance_Handler\n\t/go/src/api/v1/instance/instance_grpc.pb.go:270\ngoogle.golang.org/grpc.(*Server).processUnaryRPC\n\t/go/pkg/mod/google.golang.org/[email protected]/server.go:1431\ngoogle.golang.org/grpc.(*Server).handleStream\n\t/go/pkg/mod/google.golang.org/[email protected]/server.go:1842\ngoogle.golang.org/grpc.(*Server).serveStreams.func2.1\n\t/go/pkg/mod/google.golang.org/[email protected]/server.go:1061"}
{"level":"info","ts":1763079220.7138975,"caller":"global/log.go:24","msg":"deleting instance","challenge_id":"17","identity":"ee8a80ff9795a47a","source_id":"1"}
{"level":"info","ts":1763079236.8786423,"caller":"global/log.go:24","msg":"deleted instance successfully","challenge_id":"17","identity":"ee8a80ff9795a47a","source_id":"1"}Scenario
package main
import (
"fmt"
"github.com/ctfer-io/chall-manager/sdk"
appsv1 "github.com/pulumi/pulumi-kubernetes/sdk/v4/go/kubernetes/apps/v1"
corev1 "github.com/pulumi/pulumi-kubernetes/sdk/v4/go/kubernetes/core/v1"
metav1 "github.com/pulumi/pulumi-kubernetes/sdk/v4/go/kubernetes/meta/v1"
networkingv1 "github.com/pulumi/pulumi-kubernetes/sdk/v4/go/kubernetes/networking/v1"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Scenario-wide constants for the echo-test challenge.
const (
	// hostnameBase is the DNS suffix appended to the per-identity subdomain.
	hostnameBase = "X.X.X"
	// flagFormat is the CTF flag envelope wrapped around the variated secret.
	flagFormat = "test{%s}"
	// flag is the base secret from which per-identity flags are derived.
	flag = "very-important-test-challenge"
	// port is the HTTP port the whoami container listens on.
	port = 80
)
// main registers the Pulumi program that chall-manager executes once per
// challenge instance. sdk.Run supplies the per-instance request (identity,
// Pulumi context) and collects the response (connection info, flag).
func main() {
	sdk.Run(func(req *sdk.Request, resp *sdk.Response, opts ...pulumi.ResourceOption) error {
		// Dynamic hostname based on identity: "<identity>.<hostnameBase>".
		hostname := fmt.Sprintf("%s.%s", req.Config.Identity, hostnameBase)

		// Use chall-manager SDK for flag variation: derive a per-identity
		// variant of the base flag, then wrap it in the flag envelope twice —
		// once as a plain string (injected into the container env below) and
		// once as a pulumi value (returned to chall-manager as resp.Flag).
		flagVariantStringBase := sdk.Variate(req.Config.Identity, flag)
		flagVariantString := fmt.Sprintf(flagFormat, flagVariantStringBase)
		flagVariant := pulumi.Sprintf(flagFormat, flagVariantStringBase)

		// Resource name prefix with identity to ensure uniqueness across deployments.
		resourceName := fmt.Sprintf("echo-test-%s", req.Config.Identity)

		// Shared label set: used as object labels, the Deployment selector,
		// and the Service selector, so all three stay consistent.
		labels := pulumi.ToStringMap(map[string]string{
			"chall-manager.ctfer.io/identity": req.Config.Identity,
			"app":                             "echo-test",
		})

		// Create Deployment: a single traefik/whoami replica that echoes its
		// environment, which is how the flag is exposed to the player.
		_, err := appsv1.NewDeployment(req.Ctx, resourceName, &appsv1.DeploymentArgs{
			Metadata: metav1.ObjectMetaArgs{
				Labels: labels,
			},
			Spec: appsv1.DeploymentSpecArgs{
				Selector: metav1.LabelSelectorArgs{
					MatchLabels: labels,
				},
				Replicas: pulumi.Int(1),
				Template: corev1.PodTemplateSpecArgs{
					Metadata: metav1.ObjectMetaArgs{
						Labels: labels,
					},
					Spec: corev1.PodSpecArgs{
						Containers: corev1.ContainerArray{
							corev1.ContainerArgs{
								Name:  pulumi.String("echo"),
								Image: pulumi.String("traefik/whoami:latest"),
								Ports: corev1.ContainerPortArray{
									corev1.ContainerPortArgs{
										ContainerPort: pulumi.Int(port),
									},
								},
								Env: corev1.EnvVarArray{
									corev1.EnvVarArgs{
										Name:  pulumi.String("WHOAMI_PORT_NUMBER"),
										Value: pulumi.Sprintf("%d", port),
									},
									corev1.EnvVarArgs{
										// The whoami "name" field carries the flag;
										// whoami prints it back in its responses.
										Name:  pulumi.String("WHOAMI_NAME"),
										Value: pulumi.String("FLAG=" + flagVariantString),
									},
								},
							},
						},
					},
				},
			},
		}, opts...)
		if err != nil {
			return err
		}

		// Create Service exposing the pod on the same port (TCP).
		svc, err := corev1.NewService(req.Ctx, resourceName, &corev1.ServiceArgs{
			Metadata: metav1.ObjectMetaArgs{
				Labels: labels,
			},
			Spec: corev1.ServiceSpecArgs{
				Selector: labels,
				Ports: corev1.ServicePortArray{
					corev1.ServicePortArgs{
						Port:       pulumi.Int(port),
						TargetPort: pulumi.Int(port),
						Protocol:   pulumi.String("TCP"),
					},
				},
			},
		}, opts...)
		if err != nil {
			return err
		}

		// Create Ingress with TLS: cert-manager issues the certificate via the
		// "letsencrypt-production" ClusterIssuer into a per-instance secret.
		// NOTE(review): the "kubernetes.io/ingress.class" annotation is the
		// legacy mechanism; spec.ingressClassName is the modern equivalent —
		// confirm which one the target cluster's controller honors.
		pathTypePrefix := "Prefix"
		tlsSecretName := fmt.Sprintf("%s-tls", resourceName)
		_, err = networkingv1.NewIngress(req.Ctx, resourceName, &networkingv1.IngressArgs{
			Metadata: metav1.ObjectMetaArgs{
				Labels: labels,
				Annotations: pulumi.ToStringMap(map[string]string{
					"kubernetes.io/ingress.class":    "nginx",
					"cert-manager.io/cluster-issuer": "letsencrypt-production",
				}),
			},
			Spec: networkingv1.IngressSpecArgs{
				Tls: networkingv1.IngressTLSArray{
					networkingv1.IngressTLSArgs{
						Hosts: pulumi.ToStringArray([]string{
							hostname,
						}),
						SecretName: pulumi.String(tlsSecretName),
					},
				},
				Rules: networkingv1.IngressRuleArray{
					networkingv1.IngressRuleArgs{
						Host: pulumi.String(hostname),
						Http: networkingv1.HTTPIngressRuleValueArgs{
							Paths: networkingv1.HTTPIngressPathArray{
								networkingv1.HTTPIngressPathArgs{
									Path:     pulumi.String("/"),
									PathType: pulumi.String(pathTypePrefix),
									Backend: networkingv1.IngressBackendArgs{
										Service: networkingv1.IngressServiceBackendArgs{
											// Reference the generated Service name so the
											// backend tracks whatever name Pulumi assigned.
											Name: svc.Metadata.Name().Elem(),
											Port: networkingv1.ServiceBackendPortArgs{
												Number: pulumi.Int(port),
											},
										},
									},
								},
							},
						},
					},
				},
			},
		}, opts...)
		if err != nil {
			return err
		}

		// Export outputs matching original format: players connect over HTTPS
		// to the per-identity hostname; the variated flag validates submissions.
		resp.ConnectionInfo = pulumi.Sprintf("https://%s", hostname)
		resp.Flag = flagVariant
		return nil
	})
}
Code of Conduct
- I agree to follow this project's Code of Conduct
Metadata
Metadata
Assignees
Labels
bug — Something isn't working; chall-manager — Related to chall-manager