This repository has been archived by the owner on Apr 8, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 75
214 lines (184 loc) · 7.07 KB
/
test_integration_adapter.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
# Integration tests for the dbt-fal adapter, run per-profile (warehouse) and
# per-Python-version via a build matrix.
name: dbt-fal integration tests

on:
  pull_request:
    types: [assigned, opened, synchronize, reopened]
    paths:
      - "projects/adapter/**"
      - ".github/workflows/test_integration_adapter.yml"
  push:
    branches: [main]
    paths:
      - "projects/adapter/**"
  # schedule:
  #   # every monday
  #   - cron: "0 0 * * 1"
  workflow_dispatch:

jobs:
  run:
    runs-on: ubuntu-latest
    strategy:
      # Keep running the other matrix combinations even if one fails.
      fail-fast: false
      matrix:
        profile:
          - postgres
          - bigquery
          - snowflake
          # TODO: enable as 1.5 becomes available for following adapters
          # - trino
          # - duckdb
          # - sqlserver
        dbt_version:
          - "1.7.0"
        python:
          - "3.8"
          - "3.9"
          - "3.10"
          # - "3.11"
        include:
          # Extra pip extras / feature flags for specific profiles.
          - profile: snowflake
            teleport: true
            cloud: true
          - profile: bigquery
            cloud: true
    # Cancel superseded runs of the same branch/profile/python combination.
    concurrency:
      group: "${{ github.head_ref || github.run_id }}-${{ github.workflow }}-${{ matrix.profile }}-${{ matrix.python }}"
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@v2
        with:
          path: "fal"

      # Local databases are only needed for profiles that don't point at a
      # managed cloud warehouse.
      # NOTE(review): current ubuntu-latest runners ship Compose v2
      # (`docker compose`); the v1 `docker-compose` binary may be absent —
      # confirm before re-enabling these on fresh runners.
      - name: Start Docker database
        working-directory: fal/projects/adapter/cli_tests
        if: contains(fromJSON('["postgres"]'), matrix.profile)
        run: docker-compose up -d

      - name: Start trino
        working-directory: fal/projects/adapter/integration_tests/configs/trino
        if: contains(fromJSON('["trino"]'), matrix.profile)
        run: docker-compose up -d

      - name: Setup sqlserver dependencies
        if: contains(fromJSON('["sqlserver"]'), matrix.profile)
        run: |
          sudo apt install unixodbc-dev
          sudo curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
          curl https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/prod.list > prod.list
          sudo cp prod.list /etc/apt/sources.list.d/mssql-release.list
          sudo apt update
          sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18

      - name: Start sqlserver
        working-directory: fal/projects/adapter/integration_tests/configs/sqlserver
        if: contains(fromJSON('["sqlserver"]'), matrix.profile)
        run: |
          docker-compose up -d

      # Install the dbt adapter for the matrix profile (pinned to
      # matrix.dbt_version when set) plus dbt-fal itself with the extras the
      # profile needs (teleport/cloud per the matrix `include` entries).
      - name: Setup latest dependencies
        working-directory: fal/projects/adapter/integration_tests
        run: |
          python -m venv .venv
          source .venv/bin/activate
          pip install --upgrade pip

          ADAPTER_PACKAGE="dbt-${{ matrix.profile }}"
          if [[ -n '${{ matrix.dbt_version }}' ]]
          then
            ADAPTER_PACKAGE="${ADAPTER_PACKAGE}==${{ matrix.dbt_version }}"
          fi

          pushd ..
          EXTRAS="${{ matrix.profile }}"
          if [[ '${{ matrix.teleport }}' == 'true' ]]
          then
            EXTRAS="$EXTRAS,teleport"
          fi
          if [[ '${{ matrix.cloud }}' == 'true' ]]
          then
            EXTRAS="$EXTRAS,cloud"
          fi
          DBT_FAL_PACKAGE=".[$EXTRAS]"

          echo "pip install $ADAPTER_PACKAGE -e $DBT_FAL_PACKAGE"
          pip install $ADAPTER_PACKAGE -e $DBT_FAL_PACKAGE
          popd

      - name: Setup behave
        working-directory: fal/projects/adapter/integration_tests
        run: pip install behave

      - name: Run tests
        id: test_run
        working-directory: fal/projects/adapter/integration_tests
        env:
          # Quoted: Actions env values are always strings to the consumer.
          FAL_STATS_ENABLED: "false"
          # Teleport
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          # Isolate Cloud
          FAL_HOST: ${{ secrets.FAL_HOST }}
          FAL_KEY_SECRET: ${{ secrets.FAL_KEY_SECRET }}
          FAL_KEY_ID: ${{ secrets.FAL_KEY_ID }}
          # BigQuery
          KEYFILE: ${{ secrets.GCP_SA_KEY }}
          GCLOUD_PROJECT: ${{ secrets.GCP_PROJECT_ID }}
          BQ_DATASET: ${{ secrets.BQ_DATASET }}
          # Snowflake
          SF_ACCOUNT: ${{ secrets.SF_ACCOUNT }}
          SF_USER: ${{ secrets.SF_USER }}
          SF_PASSWORD: ${{ secrets.SF_PASSWORD }}
          SF_ROLE: ${{ secrets.SF_ROLE }}
          SF_DATABASE: ${{ secrets.SF_DATABASE }}
          SF_WAREHOUSE: ${{ secrets.SF_WAREHOUSE }}
          SF_SCHEMA: ${{ secrets.SF_SCHEMA }}
          # Duckdb
          DB_PATH: ${{ github.workspace }}/duck.db
        run: |
          source .venv/bin/activate

          # Database and schema setup for sources
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            export DBT_DATABASE="$GCLOUD_PROJECT" DBT_SCHEMA="$BQ_DATASET"
          fi
          if [[ '${{ matrix.profile }}' == "snowflake" ]]
          then
            export DBT_DATABASE="$SF_DATABASE" DBT_SCHEMA="$SF_SCHEMA"
          fi
          if [[ '${{ matrix.profile }}' == "duckdb" ]]
          then
            # TODO: which to use for sources? Example:
            # database: "{{ env_var('DBT_DATABASE', 'test') }}"
            # schema: "{{ env_var('DBT_SCHEMA', 'dbt_fal') }}"
            export DBT_DATABASE="" DBT_SCHEMA=""
          fi

          # Split the GCP service-account JSON keyfile into the individual
          # env vars the BigQuery profile template expects.
          if [[ '${{ matrix.profile }}' == "bigquery" ]]
          then
            export GCLOUD_PRIVATE_KEY_ID=$(echo $KEYFILE | jq '.private_key_id' | tr -d '"')
            export RAW_PRIVATE_KEY=$(echo $KEYFILE | jq '.private_key' | tr -d '"')
            # Turn the JSON-escaped "\n" sequences back into real newlines.
            export GCLOUD_PRIVATE_KEY="${RAW_PRIVATE_KEY//'\n'/$'\n'}"
            export GCLOUD_CLIENT_EMAIL=$(echo $KEYFILE | jq '.client_email' | tr -d '"')
            export GCLOUD_CLIENT_ID=$(echo $KEYFILE | jq '.client_id' | tr -d '"')
            export GCLOUD_X509_CERT_URL=$(echo $KEYFILE | jq '.client_x509_cert_url' | tr -d '"')
          fi

          # Could not get the real job_id easily from context
          UUID=$(uuidgen | head -c8)
          export DB_NAMESPACE="${{ github.run_id }}_${UUID}"

          # Skip scenarios tagged TODO for this profile, and teleport/cloud
          # scenarios when the matrix combination doesn't enable them.
          BEHAVE_TAGS="--tags=-TODO-${{ matrix.profile }}"
          if [[ '${{ matrix.teleport }}' != 'true' ]]
          then
            BEHAVE_TAGS="$BEHAVE_TAGS --tags=-teleport"
          fi
          if [[ '${{ matrix.cloud }}' != 'true' ]]
          then
            BEHAVE_TAGS="$BEHAVE_TAGS --tags=-cloud"
          fi

          # Expose the branch under test: head ref for PRs, pushed ref otherwise.
          if [[ -z "${GITHUB_HEAD_REF}" ]]
          then
            export FAL_GITHUB_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}
          else
            export FAL_GITHUB_BRANCH=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}
          fi

          behave $BEHAVE_TAGS -fplain -D profile=${{ matrix.profile }}

      # NOTE: only fires on scheduled runs — dead until the `schedule`
      # trigger above is re-enabled.
      - name: Send custom JSON data to Slack workflow
        if: (failure() || cancelled()) && github.event_name == 'schedule'
        id: slack
        uses: slackapi/[email protected]
        with:
          # For posting a rich message using Block Kit
          payload: |
            {
              "text": "Integration tests failed for dbt-${{ matrix.profile }}@${{ matrix.dbt_version }} (Python ${{ matrix.python }})\nhttps://github.com/fal-ai/fal/actions/runs/${{ github.run_id }}"
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK