Skip to content

Commit

Permalink
Add docker-backed acceptance test suite
Browse files Browse the repository at this point in the history
  • Loading branch information
pecigonzalo committed Oct 23, 2022
1 parent 6233244 commit d2a9ecb
Show file tree
Hide file tree
Showing 7 changed files with 223 additions and 36 deletions.
41 changes: 37 additions & 4 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,60 @@ services:
zookeeper:
image: docker.io/bitnami/zookeeper:3.8
ports:
- "2181:2181"
- "2181"
volumes:
- "zookeeper_data:/bitnami"
environment:
- ALLOW_ANONYMOUS_LOGIN=yes
kafka:
kafka-0:
image: docker.io/bitnami/kafka:3.3
ports:
- "9092:9092"
environment:
- KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
- KAFKA_CFG_BROKER_ID=0
- ALLOW_PLAINTEXT_LISTENER=yes
- KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092
volumes:
- kafka_0_data:/bitnami/kafka
depends_on:
- zookeeper
kafka-1:
image: docker.io/bitnami/kafka:3.3
ports:
- "9093:9092"
environment:
- KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
- KAFKA_CFG_BROKER_ID=1
- ALLOW_PLAINTEXT_LISTENER=yes
- KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092
volumes:
- "kafka_data:/bitnami"
- kafka_1_data:/bitnami/kafka
depends_on:
- zookeeper
kafka-2:
image: docker.io/bitnami/kafka:3.3
ports:
- "9094:9092"
environment:
- KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
- KAFKA_CFG_BROKER_ID=2
- ALLOW_PLAINTEXT_LISTENER=yes
- KAFKA_CFG_LISTENERS=PLAINTEXT://:9092
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092
volumes:
- kafka_2_data:/bitnami/kafka
depends_on:
- zookeeper

volumes:
zookeeper_data:
driver: local
kafka_data:
kafka_0_data:
driver: local
kafka_1_data:
driver: local
kafka_2_data:
driver: local
49 changes: 49 additions & 0 deletions internal/modifier/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
package modifier

import (
"context"

"github.com/hashicorp/terraform-plugin-framework/attr"
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
)

// DefaultAttributePlanModifier is an attribute plan modifier that writes a
// static default value into the plan when the attribute is left unset in the
// practitioner's configuration.
//
// Workaround for https://github.com/hashicorp/terraform-plugin-framework/issues/285
type DefaultAttributePlanModifier struct {
	value attr.Value // default applied when the configured value is null
}

// DefaultAttribute returns a DefaultAttributePlanModifier that plans the given
// value whenever the attribute is omitted from the configuration.
func DefaultAttribute(value attr.Value) DefaultAttributePlanModifier {
	return DefaultAttributePlanModifier{value: value}
}

// Modify writes the modifier's default value into the plan, but only when the
// practitioner supplied no configuration value and no earlier plan modifier
// has already produced a concrete planned value.
func (m DefaultAttributePlanModifier) Modify(
	ctx context.Context,
	req tfsdk.ModifyAttributePlanRequest,
	resp *tfsdk.ModifyAttributePlanResponse,
) {
	switch {
	case req.AttributeConfig == nil || resp.AttributePlan == nil:
		// Nothing to inspect or to write to.
		return
	case !req.AttributeConfig.IsNull():
		// An explicit configuration value always wins over the default.
		return
	case !resp.AttributePlan.IsNull() && !resp.AttributePlan.IsUnknown():
		// Another modifier already planned a concrete value; keep it.
		return
	}

	resp.AttributePlan = m.value
}

// Description returns a human-readable summary of this plan modifier's effect.
func (m DefaultAttributePlanModifier) Description(ctx context.Context) string {
	const summary = "Use a static default value for an attribute"
	return summary
}

// MarkdownDescription returns the Markdown form of the modifier's description;
// it is identical to the plain-text Description.
func (m DefaultAttributePlanModifier) MarkdownDescription(ctx context.Context) string {
	return m.Description(ctx)
}
105 changes: 105 additions & 0 deletions internal/provider/provider_sweeper_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
package provider

import (
"log"
"os"
"testing"

"github.com/ory/dockertest/v3"
"github.com/ory/dockertest/v3/docker"
kafka "github.com/segmentio/kafka-go"
)

const (
	// existingTopic is pre-created in TestMain so read-only acceptance tests
	// (e.g. the topic data source) have a known topic to look up.
	existingTopic = "read.me"
)

// TestMain provisions a Dockerized single-broker Kafka cluster (plus the
// ZooKeeper instance it requires) before the acceptance tests run, seeds it
// with existingTopic, and tears everything down afterwards.
func TestMain(m *testing.M) {
	// os.Exit skips deferred calls, so all setup/teardown lives in a helper
	// that returns the process exit code.
	os.Exit(runAcceptanceSuite(m))
}

// runAcceptanceSuite starts the containers, waits until Kafka accepts
// connections and the bootstrap topic exists, runs the test binary, and
// cleans up. Teardown is deferred so partially-created resources are purged
// even when a later setup step fails.
func runAcceptanceSuite(m *testing.M) int {
	pool, err := dockertest.NewPool("")
	if err != nil {
		log.Fatalf("Could not connect to docker: %s", err)
	}

	network, err := pool.CreateNetwork("terraform-provider-kafka")
	if err != nil {
		log.Fatalf("Could not start network: %s", err)
	}
	defer func() {
		if err := pool.RemoveNetwork(network); err != nil {
			log.Printf("Could not purge network: %s", err)
		}
	}()

	zkContainer, err := pool.RunWithOptions(&dockertest.RunOptions{
		Repository:   "docker.io/bitnami/zookeeper",
		Tag:          "3.8",
		Env:          []string{"ALLOW_ANONYMOUS_LOGIN=yes"},
		Hostname:     "zookeeper",
		NetworkID:    network.Network.ID,
		ExposedPorts: []string{"2181"},
	})
	if err != nil {
		log.Printf("Could not start resource: %s", err)
		return 1
	}
	defer func() {
		if err := pool.Purge(zkContainer); err != nil {
			log.Printf("Could not purge resource: %s", err)
		}
	}()

	kafkaContainer, err := pool.RunWithOptions(&dockertest.RunOptions{
		Repository: "docker.io/bitnami/kafka",
		Tag:        "3.3",
		Env: []string{
			"KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181",
			"KAFKA_CFG_BROKER_ID=0",
			"ALLOW_PLAINTEXT_LISTENER=yes",
			"KAFKA_CFG_LISTENERS=PLAINTEXT://:9092",
			"KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092",
		},
		Hostname:  "kafka-1",
		NetworkID: network.Network.ID,
		PortBindings: map[docker.Port][]docker.PortBinding{
			// HostPort is a bare port number and HostIP an address; the
			// "/tcp" protocol suffix belongs only in the docker.Port key.
			"9092/tcp": {{HostIP: "127.0.0.1", HostPort: "9092"}},
		},
	})
	if err != nil {
		log.Printf("Could not start resource: %s", err)
		return 1
	}
	defer func() {
		if err := pool.Purge(kafkaContainer); err != nil {
			log.Printf("Could not purge resource: %s", err)
		}
	}()

	// waitForKafka succeeds once the broker answers an ApiVersions request
	// and the bootstrap topic has been created; pool.Retry backs off and
	// re-invokes it until it returns nil or times out.
	waitForKafka := func() error {
		conn, err := kafka.Dial("tcp", "localhost:9092")
		if err != nil {
			return err
		}
		defer conn.Close()

		if _, err = conn.ApiVersions(); err != nil {
			return err
		}

		// Bootstrap test topic
		topic := kafka.TopicConfig{
			Topic:             existingTopic,
			NumPartitions:     1,
			ReplicationFactor: 1,
		}
		return conn.CreateTopics(topic)
	}

	if err = pool.Retry(waitForKafka); err != nil {
		log.Printf("could not connect to kafka: %s", err)
		return 1
	}

	return m.Run()
}
10 changes: 1 addition & 9 deletions internal/provider/provider_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,6 @@ import (

"github.com/hashicorp/terraform-plugin-framework/providerserver"
"github.com/hashicorp/terraform-plugin-go/tfprotov6"
"github.com/ory/dockertest/v3"
"github.com/stretchr/testify/require"
)

const (
Expand Down Expand Up @@ -35,10 +33,4 @@ var testAccProtoV6ProviderFactories = map[string]func() (tfprotov6.ProviderServe
"kafka": providerserver.NewProtocol6WithError(New("test")()),
}

func testAccPreCheck(t *testing.T) {
pool, err := dockertest.NewPool("")
require.NoError(t, err, "could not connect to Docker")

pool.CreateNetwork("terraform-provider-kafka")

}
// testAccPreCheck is intentionally a no-op: the shared Docker-backed Kafka
// cluster is provisioned once in TestMain, so no per-test preconditions are
// required.
func testAccPreCheck(t *testing.T) {}
21 changes: 12 additions & 9 deletions internal/provider/topic_data_source_test.go
Original file line number Diff line number Diff line change
@@ -1,32 +1,35 @@
package provider

import (
"fmt"
"testing"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccExampleDataSource(t *testing.T) {
func TestAccTopicDataSource(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
// Read testing
{
Config: testAccExampleDataSourceConfig,
Config: testAccTopicDataSourceConfig(existingTopic),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("data.kafka_topic.test", "id", "example"),
resource.TestCheckResourceAttr("data.kafka_topic.test", "name", "example"),
resource.TestCheckResourceAttr("data.kafka_topic.test", "partitions", "3"),
resource.TestCheckResourceAttr("data.kafka_topic.test", "replication_factor", "3"),
resource.TestCheckResourceAttr("data.kafka_topic.test", "id", existingTopic),
resource.TestCheckResourceAttr("data.kafka_topic.test", "name", existingTopic),
resource.TestCheckResourceAttr("data.kafka_topic.test", "partitions", "1"),
resource.TestCheckResourceAttr("data.kafka_topic.test", "replication_factor", "1"),
),
},
},
})
}

const testAccExampleDataSourceConfig = providerConfig + `
func testAccTopicDataSourceConfig(name string) string {
return fmt.Sprintf(providerConfig+`
data "kafka_topic" "test" {
name = "example"
name = %[1]q
}
`, name)
}
`
9 changes: 7 additions & 2 deletions internal/provider/topic_resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-log/tflog"
"github.com/pecigonzalo/terraform-provider-kafka/internal/modifier"
kafka "github.com/segmentio/kafka-go"
"github.com/segmentio/topicctl/pkg/admin"
"github.com/segmentio/topicctl/pkg/apply/assigners"
Expand Down Expand Up @@ -89,8 +90,13 @@ func (r *TopicResource) GetSchema(ctx context.Context) (tfsdk.Schema, diag.Diagn
MarkdownDescription: "Configuration",
Type: types.MapType{ElemType: types.StringType},
Optional: true,
Computed: true,
PlanModifiers: []tfsdk.AttributePlanModifier{
resource.UseStateForUnknown(),
modifier.DefaultAttribute(types.Map{
ElemType: types.StringType,
Elems: map[string]attr.Value{},
}),
},
},
},
Expand Down Expand Up @@ -191,8 +197,7 @@ func (r *TopicResource) Read(ctx context.Context, req resource.ReadRequest, resp
data.Name = types.String{Value: topicInfo.Name}
data.Partitions = types.Int64{Value: int64(len(topicInfo.Partitions))}
data.ReplicationFactor = types.Int64{Value: int64(replicationFactor)}
// data.Version = types.Int64{Value: int64(topicInfo.Version)}
configElement := make(map[string]attr.Value)
configElement := map[string]attr.Value{}
for k, v := range topicInfo.Config {
configElement[k] = types.String{Value: v}
}
Expand Down
24 changes: 12 additions & 12 deletions internal/provider/topic_resource_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,21 +7,21 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccExampleResource(t *testing.T) {
func TestAccTopicResource(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
Steps: []resource.TestStep{
// Create and Read testing
{
Config: testAccExampleResourceConfig(
Config: testAccTopicResourceConfig(
"one",
3,
3,
1,
1,
),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("kafka_topic.test", "configurable_attribute", "one"),
resource.TestCheckResourceAttr("kafka_topic.test", "id", "example-id"),
resource.TestCheckResourceAttr("kafka_topic.test", "name", "one"),
resource.TestCheckResourceAttr("kafka_topic.test", "id", "one"),
),
},
// ImportState testing
Expand All @@ -33,25 +33,25 @@ func TestAccExampleResource(t *testing.T) {
// example code does not have an actual upstream service.
// Once the Read method is able to refresh information from
// the upstream service, this can be removed.
ImportStateVerifyIgnore: []string{"configurable_attribute"},
ImportStateVerifyIgnore: []string{"configuration"},
},
// Update and Read testing
{
Config: testAccExampleResourceConfig(
Config: testAccTopicResourceConfig(
"two",
3,
3,
1,
1,
),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("kafka_topic.test", "configurable_attribute", "two"),
resource.TestCheckResourceAttr("kafka_topic.test", "id", "two"),
),
},
// Delete testing automatically occurs in TestCase
},
})
}

func testAccExampleResourceConfig(name string, partitions int, replication_factor int) string {
func testAccTopicResourceConfig(name string, partitions int, replication_factor int) string {
return fmt.Sprintf(providerConfig+`
resource "kafka_topic" "test" {
name = %[1]q
Expand Down

0 comments on commit d2a9ecb

Please sign in to comment.