aws / aws-cdk

The AWS Cloud Development Kit is a framework for defining cloud infrastructure in code
https://aws.amazon.com/cdk
Apache License 2.0
11.65k stars 3.91k forks source link

(eks): Re-use imported Cluster kubectlProvider between stacks #23409

Closed deuscapturus closed 1 year ago

deuscapturus commented 1 year ago

Describe the feature

We have a problem with a large sprawl of VPC-connected Lambda functions because the kubectlProvider from an imported Cluster is recreated multiple times across several stacks — to the point that our account reached the maximum limit for Lambda Hyperplane ENIs.

It would be better to construct the imported eks.Cluster in one stack and share it, along with its kubectlProvider to other stacks.

However, the app below will create 3 duplicated nested stacks with Lambda functions:

import { Cluster, HelmChart, ICluster } from 'aws-cdk-lib/aws-eks'
import { App, Stack, StackProps } from 'aws-cdk-lib';
import { Construct } from 'constructs';

const app = new App();

class MyEksCluster extends Stack {

  // Attribute-based import of the shared EKS cluster, handed to consumer stacks.
  cluster: ICluster;

  constructor(scope: Construct, id: string, props?: StackProps) {
    super(scope, id, props);

    // Import the existing cluster by name. kubectlRoleArn is supplied so
    // consumers can run kubectl/Helm operations against it.
    const imported = Cluster.fromClusterAttributes(this, "EKSCluster", {
      clusterName: "test-cluster",
      kubectlRoleArn: "arn:aws:iam::123456789012:role/kubectl-role",
    });
    this.cluster = imported;
  }
}

interface HelmStackProps extends StackProps {
  /** The (imported) EKS cluster to deploy the Helm chart into. */
  cluster: ICluster
}
class MyHelmChart extends Stack {

  constructor(scope: Construct, id: string, props: HelmStackProps) {
    super(scope, id, props);

    // One chart per stack; the construct id embeds the stack name so the
    // logical ids stay distinct across the consumer stacks.
    const chartProps = {
      cluster: props.cluster,
      repository: "https://my-repo",
      namespace: "test-cdk",
      chart: "mychart",
    };
    new HelmChart(this, `${this.stackName}-HelmChart`, chartProps);
  }
}

// One provider stack plus three consumer stacks, all sharing the same
// imported cluster. Each consumer currently synthesizes its own
// kubectl-provider nested stack — the problem being reported.
const eksCluster = new MyEksCluster(app, "shared-eks-cluster");

for (const n of [1, 2, 3]) {
  new MyHelmChart(app, `helmchart-${n}`, { cluster: eksCluster.cluster });
}

Use Case

To reduce the number of VPC bound Lambdas in our accounts.

Proposed Solution

It should be possible to construct an imported eks.Cluster with a kubectlProvider.

See: https://github.com/aws/aws-cdk/pull/23380

Other Information

Cannot create kubectlProvider when Cluster is constructed because cluster cannot be used before assignment:

will not work:

    this.cluster = Cluster.fromClusterAttributes(this, "EKSCluster", {
      clusterName: "test-cluster",
      kubectlRoleArn: "arn:aws:iam::123456789012:role/kubectl-role",
      kubectlProvider: new KubectlProvider(this, "KubectlProvider", {
        cluster: this.cluster
      }),
    })

Alternatively, we could share the eks.Cluster between stacks and add Kubernetes resources using methods such as props.cluster.addHelmChart(). This puts all of the Kubernetes resources in the same stack as the eks.Cluster with a single kubectlProvider. But we want these resources in their own stacks, not all in one.

Acknowledgements

CDK version used

2.55.1

Environment details (OS name and version, etc.)

Linux

comcalvi commented 1 year ago

Found a workaround that keeps the kubectl provider count down. Please let me know if this solves your issue:

lib:

import * as cdk from 'aws-cdk-lib';
import { Construct } from 'constructs';
import * as eks from 'aws-cdk-lib/aws-eks';

export class EksClusterProviderStack extends cdk.Stack {
  public readonly cluster: eks.ICluster;

  constructor(scope: Construct, id: string, props?: cdk.StackProps) {
    super(scope, id, props);

    // Owns the real cluster; downstream stacks only ever see ICluster.
    const cluster = new eks.Cluster(this, 'cluster', {
      version: eks.KubernetesVersion.V1_22,
    });
    this.cluster = cluster;
  }
}

export interface ClusterImportStackProps extends cdk.StackProps {
  /** Cluster created by EksClusterProviderStack, to be re-imported here. */
  cluster: eks.ICluster;
}

export class EksClusterImportStack extends cdk.Stack {
  public readonly importedCluster: eks.ICluster;

  constructor(scope: Construct, id: string, props: ClusterImportStackProps) {
    super(scope, id, props);

    // Step 1: a throwaway import carrying only the name and kubectl role,
    // used solely to seed a single kubectl provider inside THIS stack.
    const seed = eks.Cluster.fromClusterAttributes(this, `tempCluster`, {
      clusterName: props.cluster.clusterName,
      kubectlRoleArn: props.cluster.kubectlRole?.roleArn,
    });

    // Step 2: re-import with that provider attached; every consumer that
    // receives this handle reuses one provider instead of creating its own.
    this.importedCluster = eks.Cluster.fromClusterAttributes(this, 'importedCluster', {
      clusterName: props.cluster.clusterName,
      kubectlProvider: eks.KubectlProvider.getOrCreate(this, seed),
    });
  }
}

export interface HelmStackProps extends cdk.StackProps {
  /** Imported cluster handle (should carry a shared kubectl provider). */
  cluster: eks.ICluster;
  /** Chart name within the repository. */
  chart: string,
  /** Helm repository URL. */
  repository: string,
}

export class HelmStack extends cdk.Stack {
  constructor(scope: Construct, id: string, props: HelmStackProps) {
    super(scope, id, props);

    // Install a single chart on the shared (imported) cluster.
    const { cluster, chart, repository } = props;
    new eks.HelmChart(this, 'helmChart', { cluster, chart, repository });
  }
}

import * as cdk from 'aws-cdk-lib';
import { Construct } from 'constructs';
import * as eks from 'aws-cdk-lib/aws-eks';

// Stack that owns the real EKS cluster and exposes it as ICluster.
export class EksClusterProviderStack extends cdk.Stack {
  public readonly cluster: eks.ICluster;
  constructor(scope: Construct, id: string, props?: cdk.StackProps) {
    super(scope, id, props);

    // Creating the cluster here also creates this stack's own kubectl
    // provider alongside it.
    this.cluster = new eks.Cluster(this, 'cluster', {
      version: eks.KubernetesVersion.V1_22,
    });
  }
}

export interface ClusterImportStackProps extends cdk.StackProps {
  /** Cluster produced by EksClusterProviderStack. */
  cluster: eks.ICluster;
}

// Wraps the provider stack's cluster in a fresh import that carries a
// single shared kubectl provider, so downstream stacks do not each
// synthesize their own provider nested stack.
export class EksClusterImportStack extends cdk.Stack {
  public readonly importedCluster: eks.ICluster;
  constructor(scope: Construct, id: string, props: ClusterImportStackProps) {
    super(scope, id, props);

    // Temporary import used only to seed getOrCreate() below.
    const temp = eks.Cluster.fromClusterAttributes(this, `tempCluster`, {
      clusterName: props.cluster.clusterName,
      kubectlRoleArn: props.cluster.kubectlRole?.roleArn,
    });
    // One provider per stack; later calls would return the same instance.
    const kubectlProvider = eks.KubectlProvider.getOrCreate(this, temp);

    // Re-import with the provider attached; consumers reuse it.
    this.importedCluster = eks.Cluster.fromClusterAttributes(this, 'importedCluster', {
      clusterName: props.cluster.clusterName,
      kubectlProvider
    });
  }
}

export interface HelmStackProps extends cdk.StackProps {
  /** Imported cluster carrying the shared kubectl provider. */
  cluster: eks.ICluster;
  /** Chart name within the repository. */
  chart: string,
  /** Helm repository URL. */
  repository: string,
}

// Installs one Helm chart on the (imported) cluster passed in via props.
export class HelmStack extends cdk.Stack {
  constructor(scope: Construct, id: string, props: HelmStackProps) {
    super(scope, id, props);

    new eks.HelmChart(this, 'helmChart', {
      cluster: props.cluster,
      chart: props.chart,
      repository: props.repository,
    });
  }
}

bin:

#!/usr/bin/env node
import 'source-map-support/register';
import * as cdk from 'aws-cdk-lib';
import { EksClusterProviderStack, EksClusterImportStack, HelmStack } from '../lib/eks-stack';

const app = new cdk.App();

const cluster = new EksClusterProviderStack(app, 'EksClusterProviderStack', { env: { region: 'eu-central-1' }} ).cluster;

const clusterImportStack = new EksClusterImportStack(app, 'EksClusterImportStack', {
  cluster,
  env: { region: 'eu-central-1' }
});

const importedCluster = clusterImportStack.importedCluster;

const helmStackA = new HelmStack(app, 'HelmStack', {
  cluster: importedCluster,
  chart: 'nginx-ingress',
  repository: 'https://helm.nginx.com/stable',
  env: { region: 'eu-central-1' }
});

const helmStackB = new HelmStack(app, 'HelmStack2', {
  cluster: importedCluster,
  chart: 'hello-world',
  repository: 'https://helm.github.io/examples',
  env: { region: 'eu-central-1' }
});

const helmStackC = new HelmStack(app, 'HelmStack3', {
  cluster: importedCluster,
  chart: 'nginx-service-mesh',
  repository: 'https://helm.nginx.com/stable',
  env: { region: 'eu-central-1' }
});
github-actions[bot] commented 1 year ago

This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.

github-actions[bot] commented 1 year ago

⚠️COMMENT VISIBILITY WARNING⚠️

Comments on closed issues are hard for our team to see. If you need more assistance, please either tag a team member or open a new issue that references this one. If you wish to keep having a conversation with other community members under this issue feel free to do so.

caretak3r commented 10 months ago

Has there been any updates on this?

diranged commented 9 months ago

This is still pretty painful... I find it surprising that there's no "clean" way to instantiate an ImportedCluster along with a KubectlProvider for it. I had to follow @comcalvi 's model and import the cluster twice.

caretak3r commented 9 months ago

This is still pretty painful... I find it surprising that there's no "clean" way to instantiate an ImportedCluster along with a KubectlProvider for it. I had to follow @comcalvi 's model and import the cluster twice.

So I was able to figure this out, and have this implemented cleanly.

  1. We have a few stacks that get deployed after eks_stack.py: (alb_stack.py, logging_stack.py, cert_manager_stack.py).
  2. In eks_stack.py we create this object variable, so once the cluster is completed, we can use it elsewhere:

stacks/eks_stack.py

class EksStack(Stack):
    # Exposed so sibling stacks (alb/logging/cert-manager) can receive the
    # cluster handle after this stack is constructed.
    cluster: eks.Cluster
  3. In app.py I pass the ICluster object to alb_stack.py:

app.py

# Pass the cluster handle from the EKS stack into the ALB stack; the ALB
# stack re-imports it via kubectl_helper.import_cluster().
alb_stack = AlbStack(
    app,
    "AlbStack",
    vpc=base_stack.vpc,
    eks_cluster=eks_stack.cluster,
    stack_name=f"{constants.STACK_PREFIX}-alb-stack-{constants.STACK_SUFFIX}",
    **env,
)
  4. Then in the respective stack (e.g. alb_stack.py) we import the cluster:

stacks/alb_stack.py

from lib import kubectl_helper
...
class AlbStack(Stack):
    def __init__(
        self,
        scope: Construct,
        construct_id: str,
        vpc: ec2.IVpc,
        eks_cluster: eks.ICluster,
        **kwargs,
    ) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Import cluster object
        self.cluster = kubectl_helper.import_cluster(self, eks_cluster)
        ...
        alb_service_account = self.cluster.add_service_account(...)
  5. Each stack calls our helper functions to import the existing cluster:

lib/kubectl_helper.py

from aws_cdk import aws_eks as eks, aws_iam as iam

def get_kubectl_provider(scope, cluster) -> eks.IKubectlProvider:
    """Return the kubectl provider for *cluster*, creating it at most once per stack."""
    return eks.KubectlProvider.get_or_create(scope=scope, cluster=cluster)

def import_cluster(scope, cluster) -> eks.ICluster:
    """Re-import *cluster* into *scope* with a shared kubectl provider attached.

    The returned ICluster carries a single kubectl provider obtained via
    get_or_create, so constructs built from it reuse one provider Lambda
    instead of synthesizing a new nested stack per consumer.
    """
    # Provider first (matches original construct-creation order).
    kubectl_provider = get_kubectl_provider(scope, cluster)
    oidc_arn = cluster.open_id_connect_provider.open_id_connect_provider_arn

    return eks.Cluster.from_cluster_attributes(
        scope,
        "ImportedEksCluster",
        cluster_name=cluster.cluster_name,
        kubectl_role_arn=cluster.kubectl_role.role_arn,
        open_id_connect_provider=iam.OpenIdConnectProvider.from_open_id_connect_provider_arn(
            scope,
            "ImportedOpenIdConnectProvider",
            open_id_connect_provider_arn=oidc_arn,
        ),
        kubectl_provider=kubectl_provider,
    )