Skip to content

Commit 0e7bfc1

Browse files
Merge pull request feast-dev#54 from farfetch-external/add_artifact_upload
Add support for uploading the Python SDK artifact from CI
2 parents 4ea5a12 + 6d7950a commit 0e7bfc1

File tree

5 files changed

+93
-21
lines changed

5 files changed

+93
-21
lines changed

.github/workflows/docker_build_test_push.yml

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,10 @@ on:
1313
env:
1414
# - change per env
1515
TF_VAR_project_name: fffeastcd
16+
17+
# - account used to access the internal pypi repository
18+
TF_VAR_pypi_user: ci
19+
1620
# - optionally change per env
1721
TERRAFORM_BACKEND_STORAGE_ACCOUNT_RESOURCE_GROUP_NAME: fffeasttf
1822
# - change per env
@@ -166,6 +170,7 @@ jobs:
166170
echo "::set-env name=TF_VAR_feast_serving_image_repository::$FEAST_SERVING_IMAGE"
167171
echo "::set-env name=TF_VAR_feast_version::$FEAST_VERSION"
168172
echo "::set-env name=TF_VAR_run_number::$GITHUB_RUN_NUMBER"
173+
echo "::set-env name=TF_VAR_pypi_password::${{ secrets.PYPI_PWD }}"
169174
170175
- name: Terraform init app
171176
# cache plugin locally to avoid re-downloading it at every run
@@ -194,12 +199,15 @@ jobs:
194199
databricks runs list --active-only | cut -f1 -d ' ' | xargs -tr -n1 databricks runs cancel --run-id
195200
working-directory: infra/terraform/app
196201

202+
- name: Compile Python protos
203+
run: set -e; make compile-protos-python
204+
197205
- uses: azure/setup-kubectl@v1
198206

199207
- name: Prepare e2e test pod and clear Redis data
200208
run: |
201209
set -ex
202-
210+
203211
kubectl delete secret azure-e2e-test || true
204212
kubectl create secret generic azure-e2e-test --from-literal=AZURE_CLIENT_SECRET="$ARM_CLIENT_SECRET"
205213
@@ -262,9 +270,31 @@ jobs:
262270
pytest -x -rA -s test-ingest.py --core_url feast-core:6565 --serving_url=feast-serving-feast-online-serving:6566
263271
"
264272
273+
- name: Upload package to Pypi
274+
run: |
275+
set -ex
276+
git fetch --unshallow
277+
git stash
278+
python setup.py sdist
279+
pip install twine
280+
twine upload dist/*.tar.gz --repository-url https://${{ secrets.PYPI_REPO }} -u ${{ secrets.PYPI_USER }} -p ${{ secrets.PYPI_PWD }}
281+
working-directory: sdk/python
282+
283+
- name: Prepare e2e test pod and clear Redis data
284+
run: |
285+
set -ex
286+
# Start e2e test pod
287+
kubectl delete pod "$E2E_TEST_POD" || true
288+
kubectl run "$E2E_TEST_POD" --restart=Never --image="$FEAST_JUPYTER_IMAGE:$FEAST_VERSION" --limits 'memory=4Gi'
289+
290+
# While the test pod is starting, clear all data from Redis
291+
kubectl run -it --rm --restart=Never --image=redis redisflush$GITHUB_RUN_NUMBER --command -- redis-cli -h $TF_VAR_redis_hostname FLUSHALL
292+
293+
kubectl wait --timeout 45m --for=condition=ContainersReady pod "$E2E_TEST_POD"
265294
- name: Delete e2e test pod
266295
run: kubectl delete pod "$E2E_TEST_POD"
267296

297+
268298
- name: Output logs
269299
if: ${{ always() }}
270300
run: |

cloudbuild.yaml

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -97,20 +97,8 @@ steps:
9797
dir: '/workspace/sdk/python'
9898
entrypoint: sh
9999
args: ['-c', 'pip install -r requirements-ci.txt && pip install -e . && pytest --junitxml=/log/python-sdk-test-report.xml']
100-
- id: clean-git-state
101-
waitFor: ['unit-test-python-sdk']
102-
name: gcr.io/cloud-builders/git
103-
args: ['stash']
104-
- id: upload-python-package
105-
waitFor: ['clean-git-state']
106-
name: 'python:3.7-buster'
107-
dir: '/workspace/sdk/python'
108-
entrypoint: sh
109-
args: ['-c', 'python setup.py sdist && pip install twine && twine upload dist/*${SHORT_SHA}* --repository-url https://${_PYPI_REPO} -u ${_PYPI_USER} -p ${_PYPI_PWD}']
110-
env:
111-
- SHORT_SHA=${SHORT_SHA}
112100
- id: install-python-sdk
113-
waitFor: ['upload-python-package']
101+
waitFor: ['unit-test-python-sdk']
114102
name: 'python:3.7-buster'
115103
entrypoint: sh
116104
args: ['-c', 'pip install -qe sdk/python']

infra/scripts/init_pypi.sh

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
#!/usr/bin/env bash
2+
3+
mkdir -p /.config && mkdir -p /.config/pip
4+
echo -e "[global]\nindex-url = https://$PYPI_USER:$PYPI_PWD@pypi.prod.konnekt.us/simple/\nextra-index-url = https://pypi.org/simple/\n" > /.config/pip/pip.conf
5+
export PIP_CONFIG_FILE=/.config/pip/pip.conf

infra/terraform/app/main.tf

Lines changed: 48 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,13 @@ data "azurerm_postgresql_server" "postgres" {
1111
locals {
1212
databricks_secret_scope = "feast"
1313
databricks_secret_datalake_key = "azure_account_key"
14+
pypi_password_secret_key = "pypi_password"
15+
pypi_username_secret_key = "pypi_username"
1416
databricks_dbfs_jar_folder = "dbfs:/feast/run${var.run_number}"
15-
databricks_spark_version = "6.6.x-scala2.11"
16-
databricks_vm_type = "Standard_D3_v2"
17-
databricks_instance_pool_name = "Feast"
17+
databricks_spark_version = "6.6.x-scala2.11"
18+
databricks_vm_type = "Standard_D3_v2"
19+
databricks_instance_pool_name = "Feast"
20+
1821
}
1922

2023
resource "azurerm_postgresql_database" "feast" {
@@ -31,10 +34,10 @@ resource "databricks_token" "feast" {
3134
}
3235

3336
resource "databricks_instance_pool" "feast" {
34-
instance_pool_name = local.databricks_instance_pool_name
35-
min_idle_instances = 2
36-
max_capacity = 6
37-
node_type_id = local.databricks_vm_type
37+
instance_pool_name = local.databricks_instance_pool_name
38+
min_idle_instances = 2
39+
max_capacity = 6
40+
node_type_id = local.databricks_vm_type
3841
idle_instance_autotermination_minutes = 60
3942
}
4043

@@ -317,3 +320,41 @@ EOT
317320
helm_release.feast_core
318321
]
319322
}
323+
324+
resource "databricks_secret" "pypi_username" {
325+
key = local.pypi_username_secret_key
326+
string_value = var.pypi_user
327+
scope = databricks_secret_scope.feast.name
328+
}
329+
330+
resource "databricks_secret" "pypi_password" {
331+
key = local.pypi_password_secret_key
332+
string_value = var.pypi_password
333+
scope = databricks_secret_scope.feast.name
334+
}
335+
336+
resource "databricks_dbfs_file" "init_pypi_script" {
337+
content = filebase64("../../scripts/init_pypi.sh")
338+
path = "/databricks/init/init_pypi.sh"
339+
overwrite = true
340+
validate_remote_file = true
341+
}
342+
343+
resource "databricks_cluster" "feast-cluster" {
344+
cluster_name = "feast-dev-test"
345+
spark_version = local.databricks_spark_version
346+
node_type_id = local.databricks_vm_type
347+
autotermination_minutes = 30
348+
349+
autoscale {
350+
min_workers = 0
351+
max_workers = 2
352+
}
353+
354+
spark_env_vars = {
355+
"PYPI_PWD" = "{{secrets/${databricks_secret_scope.feast.name}/${databricks_secret.pypi_password.key}}}"
356+
"PYPI_USER" = "{{secrets/${databricks_secret_scope.feast.name}/${databricks_secret.pypi_username.key}}}"
357+
"PYSPARK_PYTHON" = "/databricks/python3/bin/python3"
358+
"PIP_CONFIG_FILE" = "/.config/pip/pip.conf"
359+
}
360+
}

infra/terraform/app/variables.tf

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,3 +98,11 @@ variable "databricks_name" {
9898
description = "The resource name of the Azure Databricks instance."
9999
type = string
100100
}
101+
variable "pypi_user" {
102+
description = "The username to use for internal pypi authentication"
103+
type = string
104+
}
105+
variable "pypi_password" {
106+
description = "The password to use for internal pypi authentication"
107+
type = string
108+
}

0 commit comments

Comments
 (0)