From da78642bb1e21205f3e632b9f760cd7e2f75f30a Mon Sep 17 00:00:00 2001
From: Elena Terenzi
Date: Fri, 20 Dec 2024 08:16:00 +0100
Subject: [PATCH] suppress non-error output and small fixes (#977)

---
 .../parking_sensors/scripts/clean_up.sh        |  4 +-
 e2e_samples/parking_sensors/scripts/common.sh  |  8 +--
 .../scripts/deploy_azdo_pipelines.sh           |  3 +-
 .../deploy_azdo_service_connections_azure.sh   |  5 +-
 .../deploy_azdo_service_connections_github.sh  |  5 +-
 .../scripts/deploy_azdo_variables.sh           | 48 ++++++-------
 .../scripts/deploy_infrastructure.sh           | 68 +++++++++----------
 .../scripts/init_environment.sh                |  4 +-
 8 files changed, 73 insertions(+), 72 deletions(-)

diff --git a/e2e_samples/parking_sensors/scripts/clean_up.sh b/e2e_samples/parking_sensors/scripts/clean_up.sh
index bd28cbda9..9314d3d91 100755
--- a/e2e_samples/parking_sensors/scripts/clean_up.sh
+++ b/e2e_samples/parking_sensors/scripts/clean_up.sh
@@ -69,9 +69,7 @@ delete_all(){
     [yY][eE][sS]|[yY])
         log "Deleting pipelines that start with '$prefix' in name..."
         [[ -n $prefix ]] &&
-        az pipelines list -o tsv |
-        { grep "$prefix" || true; } |
-        awk '{print $4}' |
+        az pipelines list -o tsv --query "[?contains(name, '$prefix')].id" |
         xargs -r -I % az pipelines delete --id % --yes

         log "Deleting variable groups that start with '$prefix' in name..."
diff --git a/e2e_samples/parking_sensors/scripts/common.sh b/e2e_samples/parking_sensors/scripts/common.sh
index 162c36bfc..3a5bc9fe4 100755
--- a/e2e_samples/parking_sensors/scripts/common.sh
+++ b/e2e_samples/parking_sensors/scripts/common.sh
@@ -107,23 +107,23 @@ create_adf_linked_service () {
     declare name=$1
     log "Creating ADF LinkedService: $name"
     adfLsUrl="${adfFactoryBaseUrl}/linkedservices/${name}?api-version=${apiVersion}"
-    az rest --method put --uri "$adfLsUrl" --body @"${ADF_DIR}"/linkedService/"${name}".json
+    az rest --method put --uri "$adfLsUrl" --body @"${ADF_DIR}"/linkedService/"${name}".json -o none
 }
 create_adf_dataset () {
     declare name=$1
     log "Creating ADF Dataset: $name"
     adfDsUrl="${adfFactoryBaseUrl}/datasets/${name}?api-version=${apiVersion}"
-    az rest --method put --uri "$adfDsUrl" --body @"${ADF_DIR}"/dataset/"${name}".json
+    az rest --method put --uri "$adfDsUrl" --body @"${ADF_DIR}"/dataset/"${name}".json -o none
 }
 create_adf_pipeline () {
     declare name=$1
     log "Creating ADF Pipeline: $name"
     adfPUrl="${adfFactoryBaseUrl}/pipelines/${name}?api-version=${apiVersion}"
-    az rest --method put --uri "$adfPUrl" --body @"${ADF_DIR}"/pipeline/"${name}".json
+    az rest --method put --uri "$adfPUrl" --body @"${ADF_DIR}"/pipeline/"${name}".json -o none
 }
 create_adf_trigger () {
     declare name=$1
     log "Creating ADF Trigger: $name"
     adfTUrl="${adfFactoryBaseUrl}/triggers/${name}?api-version=${apiVersion}"
-    az rest --method put --uri "$adfTUrl" --body @"${ADF_DIR}"/trigger/"${name}".json
+    az rest --method put --uri "$adfTUrl" --body @"${ADF_DIR}"/trigger/"${name}".json -o none
 }
diff --git a/e2e_samples/parking_sensors/scripts/deploy_azdo_pipelines.sh b/e2e_samples/parking_sensors/scripts/deploy_azdo_pipelines.sh
index 3447e0c88..a0f49cce3 100755
--- a/e2e_samples/parking_sensors/scripts/deploy_azdo_pipelines.sh
+++ b/e2e_samples/parking_sensors/scripts/deploy_azdo_pipelines.sh
@@ -64,4 +64,5 @@ cd_release_pipeline_id=$(create_azdo_pipeline "cd-release" "This pipeline releas
 az pipelines variable create \
     --name devAdfName \
     --pipeline-id "$cd_release_pipeline_id" \
-    --value "$DEV_DATAFACTORY_NAME"
+    --value "$DEV_DATAFACTORY_NAME" \
+    -o none
diff --git a/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_azure.sh b/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_azure.sh
index c1724fb0d..61c6ba5cc 100755
--- a/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_azure.sh
+++ b/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_azure.sh
@@ -65,7 +65,7 @@ export AZURE_DEVOPS_EXT_AZURE_RM_SERVICE_PRINCIPAL_KEY=$azure_devops_ext_azure_r
 if sc_id=$(az devops service-endpoint list -o json |
     jq -r -e --arg sc_name "$az_service_connection_name" '.[] | select(.name==$sc_name) | .id'); then
     log "Service connection: $az_service_connection_name already exists. Deleting service connection id $sc_id ..." "info"
-    az devops service-endpoint delete --id "$sc_id" -y
+    az devops service-endpoint delete --id "$sc_id" -y -o none
 fi

 log "Creating Azure service connection Azure DevOps"
@@ -78,4 +78,5 @@ sc_id=$(az devops service-endpoint azurerm create \

 az devops service-endpoint update \
     --id "$sc_id" \
-    --enable-for-all "true"
\ No newline at end of file
+    --enable-for-all "true" \
+    -o none
\ No newline at end of file
diff --git a/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_github.sh b/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_github.sh
index fa4f5db61..ffe0eece8 100755
--- a/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_github.sh
+++ b/e2e_samples/parking_sensors/scripts/deploy_azdo_service_connections_github.sh
@@ -47,7 +47,7 @@ export AZURE_DEVOPS_EXT_GITHUB_PAT=$GITHUB_PAT_TOKEN
 if sc_id=$(az devops service-endpoint list -o json |
     jq -r -e --arg sc_name "$github_sc_name" '.[] | select(.name==$sc_name) | .id'); then
     log "Service connection: $github_sc_name already exists. Deleting service connection id $sc_id ..." "info"
-    az devops service-endpoint delete --id "$sc_id" -y
+    az devops service-endpoint delete --id "$sc_id" -y -o none
 fi

 log "Creating Github service connection: $github_sc_name in Azure DevOps"
@@ -59,4 +59,5 @@ github_sc_id=$(az devops service-endpoint github create \

 az devops service-endpoint update \
     --id "$github_sc_id" \
-    --enable-for-all "true"
\ No newline at end of file
+    --enable-for-all "true" \
+    -o none
\ No newline at end of file
diff --git a/e2e_samples/parking_sensors/scripts/deploy_azdo_variables.sh b/e2e_samples/parking_sensors/scripts/deploy_azdo_variables.sh
index 7e3adf17e..0bd7a76df 100755
--- a/e2e_samples/parking_sensors/scripts/deploy_azdo_variables.sh
+++ b/e2e_samples/parking_sensors/scripts/deploy_azdo_variables.sh
@@ -75,9 +75,9 @@ databricksClusterId="$DATABRICKS_CLUSTER_ID"

 # Create vargroup
 vargroup_name="${PROJECT}-release-$ENV_NAME"
-if vargroup_id=$(az pipelines variable-group list -o tsv | grep "$vargroup_name" | awk '{print $3}'); then
+if vargroup_id=$(az pipelines variable-group list -o json | jq -r -e --arg vg_name "$vargroup_name" '.[] | select(.name==$vg_name) | .id'); then
     log "Variable group: $vargroup_name already exists. Deleting..." "info"
-    az pipelines variable-group delete --id "$vargroup_id" -y
+    az pipelines variable-group delete --id "$vargroup_id" -y -o none
 fi
 log "Creating variable group: $vargroup_name"
 az pipelines variable-group create \
@@ -91,55 +91,57 @@ az pipelines variable-group create \
     databricksNotebookPath="$databricksNotebookPath" \
     databricksClusterId="$databricksClusterId" \
     apiBaseUrl="$apiBaseUrl" \
-    --output json
+    -o none

 # Create vargroup - for secrets
 vargroup_secrets_name="${PROJECT}-secrets-$ENV_NAME"
-if vargroup_secrets_id=$(az pipelines variable-group list -o tsv | grep "$vargroup_secrets_name" | awk '{print $3}'); then
+if vargroup_secrets_id=$(az pipelines variable-group list -o json | jq -r -e --arg vg_name "$vargroup_secrets_name" '.[] | select(.name==$vg_name) | .id'); then
     log "Variable group: $vargroup_secrets_name already exists. Deleting..." "info"
-    az pipelines variable-group delete --id "$vargroup_secrets_id" -y
+    az pipelines variable-group delete --id "$vargroup_secrets_id" -y -o none
 fi
 log "Creating variable group: $vargroup_secrets_name"
 vargroup_secrets_id=$(az pipelines variable-group create \
     --name "$vargroup_secrets_name" \
     --authorize "true" \
-    --output json \
-    --variables foo="bar" | jq -r .id) # Needs at least one secret
+    --output tsv \
+    --variables foo="bar" \
+    --query "id") # Needs at least one secret

 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "subscriptionId" --value "$AZURE_SUBSCRIPTION_ID"
+    --secret "true" --name "subscriptionId" --value "$AZURE_SUBSCRIPTION_ID" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "kvUrl" --value "$KV_URL"
+    --secret "true" --name "kvUrl" --value "$KV_URL" -o none
 # sql server
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "sqlsrvrName" --value "$SQL_SERVER_NAME"
+    --secret "true" --name "sqlsrvrName" --value "$SQL_SERVER_NAME" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "sqlsrvrUsername" --value "$SQL_SERVER_USERNAME"
+    --secret "true" --name "sqlsrvrUsername" --value "$SQL_SERVER_USERNAME" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "sqlsrvrPassword" --value "$SQL_SERVER_PASSWORD"
+    --secret "true" --name "sqlsrvrPassword" --value "$SQL_SERVER_PASSWORD" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "sqlDwDatabaseName" --value "$SQL_DW_DATABASE_NAME"
+    --secret "true" --name "sqlDwDatabaseName" --value "$SQL_DW_DATABASE_NAME" -o none
 # Databricks
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "databricksDomain" --value "$DATABRICKS_HOST"
+    --secret "true" --name "databricksDomain" --value "$DATABRICKS_HOST" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "databricksToken" --value "$DATABRICKS_TOKEN"
+    --secret "true" --name "databricksToken" --value "$DATABRICKS_TOKEN" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "databricksWorkspaceResourceId" --value "$DATABRICKS_WORKSPACE_RESOURCE_ID"
+    --secret "true" --name "databricksWorkspaceResourceId" \
+    --value "$DATABRICKS_WORKSPACE_RESOURCE_ID" -o none
 # Datalake
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "datalakeAccountName" --value "$AZURE_STORAGE_ACCOUNT"
+    --secret "true" --name "datalakeAccountName" --value "$AZURE_STORAGE_ACCOUNT" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "datalakeKey" --value "$AZURE_STORAGE_KEY"
+    --secret "true" --name "datalakeKey" --value "$AZURE_STORAGE_KEY" -o none
 # Adf
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "spAdfId" --value "$SP_ADF_ID"
+    --secret "true" --name "spAdfId" --value "$SP_ADF_ID" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "spAdfPass" --value "$SP_ADF_PASS"
+    --secret "true" --name "spAdfPass" --value "$SP_ADF_PASS" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "spAdfTenantId" --value "$SP_ADF_TENANT"
+    --secret "true" --name "spAdfTenantId" --value "$SP_ADF_TENANT" -o none
 az pipelines variable-group variable create --group-id "$vargroup_secrets_id" \
-    --secret "true" --name "adfResourceId" --value "$DATAFACTORY_ID"
+    --secret "true" --name "adfResourceId" --value "$DATAFACTORY_ID" -o none

 # Delete dummy vars
-az pipelines variable-group variable delete --group-id "$vargroup_secrets_id" --name "foo" -y
\ No newline at end of file
+az pipelines variable-group variable delete --group-id "$vargroup_secrets_id" --name "foo" -y -o none
\ No newline at end of file
diff --git a/e2e_samples/parking_sensors/scripts/deploy_infrastructure.sh b/e2e_samples/parking_sensors/scripts/deploy_infrastructure.sh
index 0d4691179..50f9f74de 100755
--- a/e2e_samples/parking_sensors/scripts/deploy_infrastructure.sh
+++ b/e2e_samples/parking_sensors/scripts/deploy_infrastructure.sh
@@ -45,14 +45,12 @@ set -o nounset
 ### DEPLOY ARM TEMPLATE
 #####################

-# Set account to where ARM template will be deployed to
 log "Deploying to Subscription: $AZURE_SUBSCRIPTION_ID" "info"
-az account set --subscription "$AZURE_SUBSCRIPTION_ID"

 # Create resource group
 resource_group_name="$PROJECT-$DEPLOYMENT_ID-$ENV_NAME-rg"
 log "Creating resource group: $resource_group_name"
-az group create --name "$resource_group_name" --location "$AZURE_LOCATION" --tags Environment="$ENV_NAME"
+az group create --name "$resource_group_name" --location "$AZURE_LOCATION" --tags Environment="$ENV_NAME" -o none

 # By default, set all KeyVault permission to deployer
 # Retrieve KeyVault User Id
@@ -126,8 +124,8 @@ log "Retrieving KeyVault information from the deployment."
 kv_dns_name=https://${kv_name}.vault.azure.net/

 # Store in KeyVault
-az keyvault secret set --vault-name "$kv_name" --name "kvUrl" --value "$kv_dns_name"
-az keyvault secret set --vault-name "$kv_name" --name "subscriptionId" --value "$AZURE_SUBSCRIPTION_ID"
+az keyvault secret set --vault-name "$kv_name" --name "kvUrl" --value "$kv_dns_name" -o none
+az keyvault secret set --vault-name "$kv_name" --name "subscriptionId" --value "$AZURE_SUBSCRIPTION_ID" -o none


 #########################
@@ -144,27 +142,27 @@ azure_storage_key=$(az storage account keys list \
 # Add file system storage account
 storage_file_system=datalake
 log "Creating ADLS Gen2 File system: $storage_file_system"
-az storage container create --name $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
+az storage container create --name $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none

 log "Creating folders within the file system."
 # Create folders for databricks libs
-az storage fs directory create -n '/sys/databricks/libs' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
+az storage fs directory create -n '/sys/databricks/libs' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none
 # Create folders for SQL external tables
-az storage fs directory create -n '/data/dw/fact_parking' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
-az storage fs directory create -n '/data/dw/dim_st_marker' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
-az storage fs directory create -n '/data/dw/dim_parking_bay' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
-az storage fs directory create -n '/data/dw/dim_location' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key"
+az storage fs directory create -n '/data/dw/fact_parking' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none
+az storage fs directory create -n '/data/dw/dim_st_marker' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none
+az storage fs directory create -n '/data/dw/dim_parking_bay' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none
+az storage fs directory create -n '/data/dw/dim_location' -f $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" -o none

 log "Uploading seed data to data/seed"
 az storage blob upload --container-name $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" \
-    --file data/seed/dim_date.csv --name "data/seed/dim_date/dim_date.csv" --overwrite
+    --file data/seed/dim_date.csv --name "data/seed/dim_date/dim_date.csv" --overwrite -o none
 az storage blob upload --container-name $storage_file_system --account-name "$azure_storage_account" --account-key "$azure_storage_key" \
-    --file data/seed/dim_time.csv --name "data/seed/dim_time/dim_time.csv" --overwrite
+    --file data/seed/dim_time.csv --name "data/seed/dim_time/dim_time.csv" --overwrite -o none

 # Set Keyvault secrets
-az keyvault secret set --vault-name "$kv_name" --name "datalakeAccountName" --value "$azure_storage_account"
-az keyvault secret set --vault-name "$kv_name" --name "datalakeKey" --value "$azure_storage_key"
-az keyvault secret set --vault-name "$kv_name" --name "datalakeurl" --value "https://$azure_storage_account.dfs.core.windows.net"
+az keyvault secret set --vault-name "$kv_name" --name "datalakeAccountName" --value "$azure_storage_account" -o none
+az keyvault secret set --vault-name "$kv_name" --name "datalakeKey" --value "$azure_storage_key" -o none
+az keyvault secret set --vault-name "$kv_name" --name "datalakeurl" --value "https://$azure_storage_account.dfs.core.windows.net" -o none

 ###################
 # SQL
@@ -183,11 +181,11 @@ sql_dw_connstr_uname=${sql_dw_connstr_nocred//$sql_server_username}
 sql_dw_connstr_uname_pass=${sql_dw_connstr_uname//$AZURESQL_SERVER_PASSWORD}

 # Store in Keyvault
-az keyvault secret set --vault-name "$kv_name" --name "sqlsrvrName" --value "$sql_server_name"
-az keyvault secret set --vault-name "$kv_name" --name "sqlsrvUsername" --value "$sql_server_username"
-az keyvault secret set --vault-name "$kv_name" --name "sqlsrvrPassword" --value "$AZURESQL_SERVER_PASSWORD"
-az keyvault secret set --vault-name "$kv_name" --name "sqldwDatabaseName" --value "$sql_dw_database_name"
-az keyvault secret set --vault-name "$kv_name" --name "sqldwConnectionString" --value "$sql_dw_connstr_uname_pass"
+az keyvault secret set --vault-name "$kv_name" --name "sqlsrvrName" --value "$sql_server_name" -o none
+az keyvault secret set --vault-name "$kv_name" --name "sqlsrvUsername" --value "$sql_server_username" -o none
+az keyvault secret set --vault-name "$kv_name" --name "sqlsrvrPassword" --value "$AZURESQL_SERVER_PASSWORD" -o none
+az keyvault secret set --vault-name "$kv_name" --name "sqldwDatabaseName" --value "$sql_dw_database_name" -o none
+az keyvault secret set --vault-name "$kv_name" --name "sqldwConnectionString" --value "$sql_dw_connstr_uname_pass" -o none


 ####################
@@ -207,8 +205,8 @@ appinsights_connstr=$(az monitor app-insights component show \
     jq -r '.connectionString')

 # Store in Keyvault
-az keyvault secret set --vault-name "$kv_name" --name "applicationInsightsKey" --value "$appinsights_key"
-az keyvault secret set --vault-name "$kv_name" --name "applicationInsightsConnectionString" --value "$appinsights_connstr"
+az keyvault secret set --vault-name "$kv_name" --name "applicationInsightsKey" --value "$appinsights_key" -o none
+az keyvault secret set --vault-name "$kv_name" --name "applicationInsightsConnectionString" --value "$appinsights_connstr" -o none



@@ -236,10 +234,10 @@ sp_stor_id=$(echo "$sp_stor_out" | jq -r '.appId')
 sp_stor_pass=$(echo "$sp_stor_out" | jq -r '.password')
 sp_stor_tenant=$(echo "$sp_stor_out" | jq -r '.tenant')

-az keyvault secret set --vault-name "$kv_name" --name "spStorName" --value "$sp_stor_name"
-az keyvault secret set --vault-name "$kv_name" --name "spStorId" --value "$sp_stor_id"
-az keyvault secret set --vault-name "$kv_name" --name "spStorPass" --value="$sp_stor_pass"
-az keyvault secret set --vault-name "$kv_name" --name "spStorTenantId" --value "$sp_stor_tenant"
+az keyvault secret set --vault-name "$kv_name" --name "spStorName" --value "$sp_stor_name" -o none
+az keyvault secret set --vault-name "$kv_name" --name "spStorId" --value "$sp_stor_id" -o none
+az keyvault secret set --vault-name "$kv_name" --name "spStorPass" --value="$sp_stor_pass" -o none ##=handles hyphen passwords
+az keyvault secret set --vault-name "$kv_name" --name "spStorTenantId" --value "$sp_stor_tenant" -o none

 log "Generate Databricks token"
 databricks_host=https://$(echo "$arm_output" | jq -r '.properties.outputs.databricks_output.value.properties.workspaceUrl')
@@ -262,9 +260,9 @@ databricks_token=$(DATABRICKS_TOKEN=$databricks_aad_token \
     bash -c "databricks tokens create --comment 'deployment'" | jq -r .token_value)

 # Save in KeyVault
-az keyvault secret set --vault-name "$kv_name" --name "databricksDomain" --value "$databricks_host"
-az keyvault secret set --vault-name "$kv_name" --name "databricksToken" --value "$databricks_token"
-az keyvault secret set --vault-name "$kv_name" --name "databricksWorkspaceResourceId" --value "$databricks_workspace_resource_id"
+az keyvault secret set --vault-name "$kv_name" --name "databricksDomain" --value "$databricks_host" -o none
+az keyvault secret set --vault-name "$kv_name" --name "databricksToken" --value "$databricks_token" -o none
+az keyvault secret set --vault-name "$kv_name" --name "databricksWorkspaceResourceId" --value "$databricks_workspace_resource_id" -o none

 # Configure databricks (KeyVault-backed Secret scope, mount to storage via SP, databricks tables, cluster)
 # NOTE: must use Microsoft Entra access token, not PAT token
@@ -300,7 +298,7 @@ jq --arg databricks_folder_name_transform "$databricks_folder_name_transform" '

 datafactory_id=$(echo "$arm_output" | jq -r '.properties.outputs.datafactory_id.value')
 datafactory_name=$(echo "$arm_output" | jq -r '.properties.outputs.datafactory_name.value')
-az keyvault secret set --vault-name "$kv_name" --name "adfName" --value "$datafactory_name"
+az keyvault secret set --vault-name "$kv_name" --name "adfName" --value "$datafactory_name" -o none
 log "Modified sample files saved to directory: $adfTempDir"

 # Deploy ADF artifacts
@@ -323,10 +321,10 @@ sp_adf_pass=$(echo "$sp_adf_out" | jq -r '.password')
 sp_adf_tenant=$(echo "$sp_adf_out" | jq -r '.tenant')

 # Save ADF SP credentials in Keyvault
-az keyvault secret set --vault-name "$kv_name" --name "spAdfName" --value "$sp_adf_name"
-az keyvault secret set --vault-name "$kv_name" --name "spAdfId" --value "$sp_adf_id"
-az keyvault secret set --vault-name "$kv_name" --name "spAdfPass" --value="$sp_adf_pass"
-az keyvault secret set --vault-name "$kv_name" --name "spAdfTenantId" --value "$sp_adf_tenant"
+az keyvault secret set --vault-name "$kv_name" --name "spAdfName" --value "$sp_adf_name" -o none
+az keyvault secret set --vault-name "$kv_name" --name "spAdfId" --value "$sp_adf_id" -o none
+az keyvault secret set --vault-name "$kv_name" --name "spAdfPass" --value="$sp_adf_pass" -o none ##=handles hyphen passwords
+az keyvault secret set --vault-name "$kv_name" --name "spAdfTenantId" --value "$sp_adf_tenant" -o none

 ####################
 # AZDO Azure Service Connection and Variables Groups
diff --git a/e2e_samples/parking_sensors/scripts/init_environment.sh b/e2e_samples/parking_sensors/scripts/init_environment.sh
index 711b04cca..bc6adbe23 100755
--- a/e2e_samples/parking_sensors/scripts/init_environment.sh
+++ b/e2e_samples/parking_sensors/scripts/init_environment.sh
@@ -25,9 +25,9 @@ fi
 az config set core.login_experience_v2=off
 az login --tenant $TENANT_ID
 az config set core.login_experience_v2=on
-az account set -s $AZURE_SUBSCRIPTION_ID
+az account set -s $AZURE_SUBSCRIPTION_ID -o none

-az devops configure --defaults organization=$AZDO_ORGANIZATION_URL project=$AZDO_PROJECT
+az devops configure --defaults organization=$AZDO_ORGANIZATION_URL project=$AZDO_PROJECT -o none

 # check required variables are specified.