Updated on 2024-09-30 GMT+08:00

Agency Permission Policies in Common Scenarios

This section provides agency permission policies for common scenarios, which you can use when customizing your agency permissions. Replace the value of "Resource" in each agency policy with the resources relevant to your specific needs.

Data Cleanup Agency Permission Configuration

Application scenario: The data cleanup agency is used to clean up data based on the lifecycle of a table and to clean up lakehouse table data. You need to create the agency and customize its permissions. Note that the agency name is fixed to dli_data_clean_agency.

Set the authorization scope of an agency as follows:

  • For an OBS agency, select Global services.
  • For a DLI agency, select Region-specific projects.
{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "obs:object:GetObject",
                "obs:object:DeleteObject",
                "obs:bucket:HeadBucket",
                "obs:bucket:ListBucket",
                "obs:object:PutObject"
            ]
        }
    ]
}

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dli:table:showPartitions",
                "dli:table:select",
                "dli:table:dropTable",
                "dli:table:alterTableDropPartition"
            ]
        }
    ]
}

Permission Policies for Accessing and Using OBS

Application scenario: For DLI Flink jobs, the permissions include downloading OBS objects, obtaining OBS/GaussDB(DWS) data sources (foreign tables), transferring logs, using savepoints, and enabling checkpointing. For DLI Spark jobs, the permissions allow downloading OBS objects and reading/writing OBS foreign tables.

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "obs:bucket:GetBucketPolicy",
                "obs:bucket:GetLifecycleConfiguration",
                "obs:bucket:GetBucketLocation",
                "obs:bucket:ListBucketMultipartUploads",
                "obs:bucket:GetBucketLogging",
                "obs:object:GetObjectVersion",
                "obs:bucket:GetBucketStorage",
                "obs:bucket:GetBucketVersioning",
                "obs:object:GetObject",
                "obs:object:GetObjectVersionAcl",
                "obs:object:DeleteObject",
                "obs:object:ListMultipartUploadParts",
                "obs:bucket:HeadBucket",
                "obs:bucket:GetBucketAcl",
                "obs:bucket:GetBucketStoragePolicy",
                "obs:object:AbortMultipartUpload",
                "obs:object:DeleteObjectVersion",
                "obs:object:GetObjectAcl",
                "obs:bucket:ListBucketVersions",
                "obs:bucket:ListBucket",
                "obs:object:PutObject"
            ],
            "Resource": [
                "OBS:*:*:bucket:bucketName",  // Replace bucketName with the actual bucket name, and remove this comment before use (JSON does not support comments).
                "OBS:*:*:object:*"
            ]
        },
        {
            "Effect": "Allow",
            "Action": [
                "obs:bucket:ListAllMyBuckets"
            ]
        }
    ]
}

Permission to Use DEW's Encryption Function

Application scenario: DLI Flink and Spark jobs use the secret management function of DEW Cloud Secret Management Service (CSMS).

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "csms:secretVersion:get",
                "csms:secretVersion:list",
                "kms:dek:decrypt"
            ]
        }
    ]
}

Permission to Access DLI Catalog Metadata

Application scenario: DLI Flink and Spark jobs are authorized to access DLI metadata.

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dli:table:showPartitions",
                "dli:table:alterTableAddPartition",
                "dli:table:alterTableAddColumns",
                "dli:table:alterTableRenamePartition",
                "dli:table:delete",
                "dli:column:select",
                "dli:database:dropFunction",
                "dli:table:insertOverwriteTable",
                "dli:table:describeTable",
                "dli:database:explain",
                "dli:table:insertIntoTable",
                "dli:database:createDatabase",
                "dli:table:alterView",
                "dli:table:showCreateTable",
                "dli:table:alterTableRename",
                "dli:table:compaction",
                "dli:database:displayAllDatabases",
                "dli:database:dropDatabase",
                "dli:table:truncateTable",
                "dli:table:select",
                "dli:table:alterTableDropColumns",
                "dli:table:alterTableSetProperties",
                "dli:database:displayAllTables",
                "dli:database:createFunction",
                "dli:table:alterTableChangeColumn",
                "dli:database:describeFunction",
                "dli:table:showSegments",
                "dli:database:createView",
                "dli:database:createTable",
                "dli:table:showTableProperties",
                "dli:database:showFunctions",
                "dli:database:displayDatabase",
                "dli:table:alterTableRecoverPartition",
                "dli:table:dropTable",
                "dli:table:update",
                "dli:table:alterTableDropPartition"
            ]
        }
    ]
}

Permission to Access LakeFormation Catalog Metadata

Application scenario: DLI Spark jobs are authorized to access LakeFormation metadata.

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "lakeformation:table:drop",
                "lakeformation:table:create",
                "lakeformation:policy:create",
                "lakeformation:database:create",
                "lakeformation:database:drop",
                "lakeformation:database:describe",
                "lakeformation:catalog:alter",
                "lakeformation:table:alter",
                "lakeformation:database:alter",
                "lakeformation:catalog:create",
                "lakeformation:function:describe",
                "lakeformation:catalog:describe",
                "lakeformation:function:create",
                "lakeformation:table:describe",
                "lakeformation:function:drop",
                "lakeformation:transaction:operate"
            ]
        }
    ]
}