Framework/Configurations/SVT/Services/DataLakeAnalytics.json
{
"FeatureName": "DataLakeAnalytics", "Reference": "aka.ms/azsdkosstcp/adla", "IsMaintenanceMode": false, "Controls": [ { "ControlID": "Azure_DataLakeAnalytics_AuthZ_Assign_Required_RBAC_To_Creator", "Description": "Data Lake Analytics creator must be granted only required Role Based Access Control (RBAC) access on Subscription/Resource Group/Resource", "Id": "DataLakeAnalytics110", "ControlSeverity": "Medium", "Automated": "No", "MethodName": "", "Recommendation": "Assign 'Owner' privilege only at resource group and default data lake store account scope.", "Tags": [ "SDL", "TCP", "Manual", "AuthZ" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_AuthN_AAD_For_Client_AuthN", "Description": "All Data Lake Analytics users/service principal must be authenticated using AAD backed credentials", "Id": "DataLakeAnalytics120", "ControlSeverity": "High", "Automated": "No", "MethodName": "", "Recommendation": "No action required. ADLA supports only AAD authentication.", "Tags": [ "SDL", "Information", "Manual", "AuthN" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_AuthZ_Grant_Min_RBAC_Access", "Description": "All users/identities must be granted minimum required permissions using Role Based Access Control (RBAC)", "Id": "DataLakeAnalytics130", "ControlSeverity": "Medium", "Automated": "Yes", "MethodName": "CheckRBACAccess", "Recommendation": "Assign only the 'Data Lake Analytics Developer' RBAC role to developers who manage U-SQL jobs. \nRefer: https://docs.microsoft.com/en-us/azure/data-lake-analytics/data-lake-analytics-manage-use-portal#manage-users", "Tags": [ "SDL", "TCP", "Automated", "AuthZ", "RBAC" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_AuthZ_Assign_Least_Privilege_ACL", "Description": "Data Lake Analytics developer (user/service principal) must have least required ACLs on Catalog/Database and Data Lake Store file system", "Id": "DataLakeAnalytics140", "ControlSeverity": "Medium", "Automated": "No", "MethodName": "", "Recommendation": "Navigate to Azure Portal --> Data Lake Analytics Account --> Add User Wizard option to add users. Ensure least necessary privileges are granted.", "Tags": [ "SDL", "TCP", "Manual", "AuthZ" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_Config_Storage_Datasource_Securely", "Description": "Storage account data source must be added securely", "Id": "DataLakeAnalytics150", "ControlSeverity": "Medium", "Automated": "No", "MethodName": "", "Recommendation": "Setup storage data source using PowerShell command 'Add-AzureRmDataLakeAnalyticsDataSource -ResourceGroupName <ResourceGroup> -Account <ADLAAccount> -AzureBlob <StorageAccount> -AccessKey ((Get-AzureRmStorageAccountKey -ResourceGroupName <ResourceGroupOfStorageAccount> -Name <StorageAccountName>).Key1)'", "Tags": [ "SDL", "Best Practice", "Manual", "Config" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_DP_Encrypt_Connection_Strings", "Description": "Secrets to access SQL Azure/SQL VM/SQL Data Warehouse must be securely stored under Catalog database credentials", "Id": "DataLakeAnalytics160", "ControlSeverity": "High", "Automated": "No", "MethodName": "", "Recommendation": "Password of the account used to access SQL Azure must be created as a new catalog secret using the 'New-AzureRmDataLakeAnalyticsCatalogSecret' PS command. Refer: https://docs.microsoft.com/en-us/powershell/module/azurerm.datalakeanalytics/new-azurermdatalakeanalyticscatalogsecret. \nThen create credential object for this catalog secret using the U-SQL command 'CREATE CREDENTIAL', Refer: https://docs.microsoft.com/en-us/azure/sql-data-warehouse/sql-data-warehouse-load-from-azure-data-lake-store#configure-the-data-source", "Tags": [ "SDL", "Best Practice", "Manual", "DP" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_SI_USQL_Script_Integrity", "Description": "U-SQL script file(s) must be uploaded from a secured/trusted location", "Id": "DataLakeAnalytics170", "ControlSeverity": "High", "Automated": "No", "MethodName": "", "Recommendation": "Make sure that the client machine used to upload scripts is secure (Antimalware, patching, etc.). Also, do not upload files that have originated from potentially untrusted sites.", "Tags": [ "SDL", "TCP", "Manual", "SI" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_Audit_Enable_Diagnostics_Log", "Description": "Diagnostics logs must be enabled with a retention period of at least $($this.ControlSettings.Diagnostics_RetentionPeriod_Min) days.", "Id": "DataLakeAnalytics180", "ControlSeverity": "Medium", "Automated": "Yes", "MethodName": "CheckDiagnosticsSettings", "Recommendation": "Enable 'Audit' and 'Requests' logs with retention days $($this.ControlSettings.Diagnostics_RetentionPeriod_Min) or $($this.ControlSettings.Diagnostics_RetentionPeriod_Forever) (= forever). Run PS command 'Set-AzureRmDiagnosticSetting -ResourceId <ResourceId> -Enable $true -StorageAccountId <StorageAccountId> -RetentionInDays 365 -RetentionEnabled $true'", "Tags": [ "SDL", "TCP", "Automated", "Audit", "Diagnostics" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_DP_Encrypt_At_Rest", "Description": "Sensitive data must be encrypted at rest", "Id": "DataLakeAnalytics190", "ControlSeverity": "High", "Automated": "Yes", "MethodName": "CheckEncryptionAtRest", "Recommendation": "Default Data Lake Store Account must have encryption enabled. \nRefer: https://docs.microsoft.com/en-us/azure/data-lake-store/data-lake-store-security-overview#data-protection", "Tags": [ "SDL", "TCP", "Automated", "DP" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_DP_Encrypt_In_Transit", "Description": "Sensitive data must be encrypted in transit", "Id": "DataLakeAnalytics200", "ControlSeverity": "High", "Automated": "No", "MethodName": "", "Recommendation": "No action required. ADLA provides encryption in transit using HTTPS transport layer security.", "Tags": [ "SDL", "Information", "Manual", "DP" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_BCDR_Plan_Default_Data_Lake_Store", "Description": "Backup and Disaster Recovery must be planned for the default Data Lake Store account", "Id": "DataLakeAnalytics210", "ControlSeverity": "Medium", "Automated": "No", "MethodName": "", "Recommendation": "Ensure that any critical business/catalog data in the default Data Lake Store has been backed up from a BC-DR standpoint.", "Tags": [ "SDL", "TCP", "Manual", "BCDR" ], "Enabled": true }, { "ControlID": "Azure_DataLakeAnalytics_Audit_Review_Logs", "Description": "Diagnostic and activity logs for Data Lake Analytics should be reviewed periodically", "Id": "DataLakeAnalytics220", "ControlSeverity": "Medium", "Automated": "No", "MethodName": "", "Recommendation": "Review diagnostic/activity logs to check activities on the resource. Refer: https://docs.microsoft.com/en-us/azure/monitoring-and-diagnostics/monitoring-overview-of-diagnostic-logs and https://docs.microsoft.com/en-us/azure/monitoring-and-diagnostics/monitoring-overview-activity-logs", "Tags": [ "SDL", "Best Practice", "Manual", "Audit" ], "Enabled": true } ] }