Commit 11c5553

Merge branch 'main' into ODSC-64654-register-model-artifact-reference

2 parents: c87b1fa + 88d6feb

File tree: 147 files changed, +11353 −2215 lines

Lines changed: 59 additions & 0 deletions

@@ -0,0 +1,59 @@
name: "Forecast Explainer Tests"

on:
  workflow_dispatch:
  pull_request:
    branches: [ "main", "operators/**" ]

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  SEGMENT_DOWNLOAD_TIMEOUT_MINS: 5

jobs:
  test:
    name: python ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    timeout-minutes: 180

    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10", "3.11"]

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ github.event.pull_request.head.sha }}

      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
          cache-dependency-path: |
            pyproject.toml
            "**requirements.txt"
            "test-requirements-operators.txt"

      - uses: ./.github/workflows/set-dummy-conf
        name: "Test config setup"

      - name: "Run Forecast Explainer Tests"
        timeout-minutes: 180
        shell: bash
        run: |
          set -x # print commands that are executed
          $CONDA/bin/conda init
          source /home/runner/.bashrc
          pip install -r test-requirements-operators.txt
          pip install "oracle-automlx[forecasting]>=25.1.1"
          pip install pandas>=2.2.0
          python -m pytest -v -p no:warnings --durations=5 tests/operators/forecast/test_explainers.py

.github/workflows/run-forecast-unit-tests.yml

Lines changed: 2 additions & 2 deletions

@@ -56,6 +56,6 @@ jobs:
           $CONDA/bin/conda init
           source /home/runner/.bashrc
           pip install -r test-requirements-operators.txt
-          pip install "oracle-automlx[forecasting]>=24.4.0"
+          pip install "oracle-automlx[forecasting]>=25.1.1"
           pip install pandas>=2.2.0
-          python -m pytest -v -p no:warnings --durations=5 tests/operators/forecast
+          python -m pytest -v -p no:warnings --durations=5 tests/operators/forecast --ignore=tests/operators/forecast/test_explainers.py

.github/workflows/run-unittests-py39-py310.yml renamed to .github/workflows/run-unittests-py310-py311.yml

Lines changed: 3 additions & 4 deletions

@@ -1,4 +1,4 @@
-name: "[Py3.9-3.11] - All Unit Tests"
+name: "[Py3.10-3.11] - All Unit Tests"

 on:
   workflow_dispatch:

@@ -33,15 +33,14 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.10", "3.11"]
         name: ["unitary", "slow_tests"]
         include:
           - name: "unitary"
             test-path: "tests/unitary"
             # `model` tests running in "slow_tests",
             # `feature_store` tests has its own test suite
-            # `forecast` tests not supported in python 3.9,3.10 (automlx dependency). Tests are running in python3.8 test env, see run-unittests-py38-cov-report.yml
-            # 'pii' tests run only with py3.8, 'datapane' library conflicts with pandas>2.2.0, which used in py3.9/3.10 setup
+            # `forecast` tests not run in this suite
             # 'hpo' tests hangs if run together with all unitary tests. Tests running in separate command before running all unitary
             ignore-path: |
               --ignore tests/unitary/with_extras/model \

.github/workflows/run-unittests-py38-cov-report.yml renamed to .github/workflows/run-unittests-py39-cov-report.yml

Lines changed: 4 additions & 4 deletions

@@ -1,4 +1,4 @@
-name: "[Py3.8][COV REPORT] - All Unit Tests"
+name: "[Py3.9][COV REPORT] - All Unit Tests"

 on:
   workflow_dispatch:

@@ -26,7 +26,7 @@ env:

 jobs:
   test:
-    name: python 3.8, ${{ matrix.name }}
+    name: python 3.9, ${{ matrix.name }}
     runs-on: ubuntu-latest
     timeout-minutes: 90

@@ -58,7 +58,7 @@ jobs:

       - uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.9"
          cache: "pip"
          cache-dependency-path: |
            pyproject.toml

@@ -71,7 +71,7 @@ jobs:
        name: "Test env setup"
        timeout-minutes: 30

-      # Installing pii deps for python3.8 test setup only, it will not work with python3.9/3.10, because
+      # Installing pii deps for python3.9 test setup only, it will not work with python3.9/3.10, because
       # 'datapane' library conflicts with pandas>2.2.0, which used in py3.9/3.10 setup
       - name: "Install PII dependencies"
         run: |

.gitignore

Lines changed: 2 additions & 1 deletion

@@ -86,7 +86,8 @@ celerybeat-schedule
 *.sage.py

 # dotenv
-.env
+.env*
+run_ads.sh

 # virtualenv
 .venv

README-development.md

Lines changed: 121 additions & 1 deletion

@@ -1,15 +1,41 @@
+<!-- TOC -->
 # Summary

 The Oracle Accelerated Data Science (ADS) SDK used by data scientists and analysts for
 data exploration and experimental machine learning to democratize machine learning and
-analytics by providing easy-to-use, performant, and user friendly tools that
+analytics by providing easy-to-use,
+performant, and user friendly tools that
 brings together the best of data science practices.

 The ADS SDK helps you connect to different data sources, perform exploratory data analysis,
 data visualization, feature engineering, model training, model evaluation, and
 model interpretation. ADS also allows you to connect to the model catalog to save and load
 models to and from the catalog.

+- [Summary](#summary)
+  - [Documentation](#documentation)
+  - [Get Support](#get-support)
+  - [Getting started](#getting-started)
+    - [Step 1: Create a conda environment](#step-1-create-a-conda-environment)
+    - [Step 2: Activate your environment](#step-2-activate-your-environment)
+    - [Step 3: Clone ADS and install dependencies](#step-3-clone-ads-and-install-dependencies)
+    - [Step 4: Setup configuration files](#step-4-setup-configuration-files)
+    - [Step 5: Versioning and generation the wheel](#step-5-versioning-and-generation-the-wheel)
+  - [Running tests](#running-tests)
+    - [Running default setup tests](#running-default-setup-tests)
+    - [Running all unit tests](#running-all-unit-tests)
+    - [Running integration tests](#running-integration-tests)
+    - [Running opctl integration tests](#running-opctl-integration-tests)
+  - [Local Setup of AQUA API JupyterLab Server](#local-setup-of-aqua-api-jupyterlab-server)
+    - [Step 1: Requirements](#step-1-requirements)
+    - [Step 2: Create local .env files](#step-2-create-local-env-files)
+    - [Step 3: Add the run\_ads.sh script in the ADS Repository](#step-3-add-the-run_adssh-script-in-the-ads-repository)
+    - [Step 4: Run the JupyterLab Server](#step-4-run-the-jupyterlab-server)
+    - [Step 5: Run the unit tests for the AQUA API](#step-5-run-the-unit-tests-for-the-aqua-api)
+  - [Security](#security)
+  - [License](#license)
+
+
 ## Documentation

 - [ads-documentation](https://docs.oracle.com/en-us/iaas/tools/ads-sdk/latest/index.html)
@@ -137,6 +163,100 @@ To build development container, see the [Build Development Container Image](http
python3 -m pytest tests/integration/opctl
```

## Local Setup of AQUA API JupyterLab Server

These are the steps to run the AQUA (AI Quick Actions) API Server for development and testing purposes. The source code for the AQUA API Server is [here](https://github.com/oracle/accelerated-data-science/tree/21ba00b95aef8581991fee6c7d558e2f2b1680ac/ads/aqua) within this repository.

### Step 1: Requirements

+ Complete the [Getting Started](#getting-started) section above and create a conda environment with Python 3.9 or 3.10.
+ Install a REST API client in your IDE (Thunder Client on [vscode](https://marketplace.visualstudio.com/items?itemName=rangav.vscode-thunder-client) or Postman).
+ Activate the conda environment from the Getting Started section and run:

```
pip install -r test-requirements.txt
```

### Step 2: Create local .env files

Running the local JupyterLab server requires setting OCI authentication, proxy, and OCI namespace parameters. Adapt this .env file with your specific OCI profile and OCIDs to set these variables.

```
CONDA_BUCKET_NS="your_conda_bucket"
http_proxy=""
https_proxy=""
HTTP_PROXY=""
HTTPS_PROXY=""
OCI_ODSC_SERVICE_ENDPOINT="your_service_endpoint"
AQUA_SERVICE_MODELS_BUCKET="service-managed-models"
AQUA_TELEMETRY_BUCKET_NS=""
PROJECT_COMPARTMENT_OCID="ocid1.compartment.oc1.<your_ocid>"
OCI_CONFIG_PROFILE="your_oci_profile_name"
OCI_IAM_TYPE="security_token" # no modification needed if using token-based auth
TENANCY_OCID="ocid1.tenancy.oc1.<your_ocid>"
AQUA_JOB_SUBNET_ID="ocid1.subnet.oc1.<your_ocid>"
ODSC_MODEL_COMPARTMENT_OCID="ocid1.compartment.oc1.<your_ocid>"
PROJECT_OCID="ocid1.datascienceproject.oc1.<your_ocid>"
```

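As a quick sanity check before starting the server, the variables above can be verified from Python. This is a minimal sketch under the assumption that the .env file has already been exported into the shell (for example via the run_ads.sh script in Step 3); the list of names simply mirrors the file above and is not an official requirements list.

```python
# Minimal sketch: confirm the AQUA-related variables from the .env file are set
# in the current environment before launching the JupyterLab server.
import os

REQUIRED_VARS = [
    "CONDA_BUCKET_NS",
    "OCI_ODSC_SERVICE_ENDPOINT",
    "PROJECT_COMPARTMENT_OCID",
    "OCI_CONFIG_PROFILE",
    "OCI_IAM_TYPE",
    "TENANCY_OCID",
    "ODSC_MODEL_COMPARTMENT_OCID",
    "PROJECT_OCID",
]

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")
print("All required AQUA environment variables are set.")
```
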
### Step 3: Add the run_ads.sh script in the ADS Repository

+ Add the shell script below and the .env file from Step 2 to your local directory of the cloned ADS repository.
+ Run ```chmod +x run_ads.sh``` after you create this script.

```
#!/bin/bash

#### Check if a CLI command is provided
if [ "$#" -lt 1 ]; then
    echo "Usage: $0 <cli command>"
    exit 1
fi

#### Load environment variables from .env file
if [ -f .env ]; then
    export $(grep -v '^#' .env | xargs)
else
    echo "Error: .env file not found!"
    exit 1
fi

# Execute the CLI command
"$@"
```

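As an aside, when working outside the shell (for example in a notebook or an ad-hoc test session), the same .env file can be loaded directly from Python. This sketch assumes the python-dotenv package is installed, which is not a stated requirement of the steps above.

```python
# Minimal sketch: load the .env file from Step 2 directly into os.environ.
# Assumes `pip install python-dotenv`; this package is not required by the steps above.
import os

from dotenv import load_dotenv

load_dotenv(".env")  # reads KEY="value" pairs from the file and exports them
print(os.environ.get("OCI_CONFIG_PROFILE"))
```
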
### Step 4: Run the JupyterLab Server

We can start the JupyterLab server using the following command:

```
./run_ads.sh jupyter lab --no-browser --ServerApp.disable_check_xsrf=True
```

+ Run ```pkill jupyter-lab``` to kill the JupyterLab server, then re-run the server to reflect changes made locally to the AQUA API.
+ To test whether the server is running via the CLI, run this in a terminal:

```
./run_ads.sh ads aqua model list
```

To make calls to the API, use the link http://localhost:8888/aqua/insert_handler_here with a REST API client like Thunder Client or Postman.

Examples of handlers:

```
GET http://localhost:8888/aqua/model        # calling the model_handler.py
GET http://localhost:8888/aqua/deployments  # calling the deployment_handler.py
```

Handlers can be found [here](https://github.com/oracle/accelerated-data-science/tree/21ba00b95aef8581991fee6c7d558e2f2b1680ac/ads/aqua/extension).

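The same handler endpoints can also be exercised from Python instead of a REST client. This is a minimal sketch, assuming the JupyterLab server from Step 4 is running locally on port 8888 and that the `requests` package is installed; the endpoint paths are the ones listed above.

```python
# Minimal sketch: query the locally running AQUA API handlers.
# Assumes the JupyterLab server from Step 4 is listening on localhost:8888
# and that `pip install requests` has been run in the same environment.
import requests

BASE_URL = "http://localhost:8888/aqua"

for handler in ("model", "deployments"):
    resp = requests.get(f"{BASE_URL}/{handler}", timeout=30)
    print(handler, resp.status_code)
    if resp.ok:
        # The handlers return JSON payloads on success.
        print(resp.json())
```
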
### Step 5: Run the unit tests for the AQUA API

All the unit tests can be found [here](https://github.com/oracle/accelerated-data-science/tree/main/tests/unitary/with_extras/aqua).
The following commands detail how the unit tests can be run.

```
# Run all tests in the AQUA project
python -m pytest -q tests/unitary/with_extras/aqua

# Run all tests specific to a module within the AQUA project (ex. test_deployment.py, test_model.py, etc.)
python -m pytest -q tests/unitary/with_extras/aqua/test_deployment.py

# Run a specific test method within the module (replace test_get_deployment_default_params with the targeted test method)
python -m pytest tests/unitary/with_extras/aqua/test_deployment.py -k "test_get_deployment_default_params"
```

## Security

Consult the [security guide](./SECURITY.md) for our responsible security

THIRD_PARTY_LICENSES.txt

Lines changed: 12 additions & 0 deletions

@@ -96,6 +96,12 @@ fsspec
 * Source code: https://github.com/intake/filesystem_spec
 * Project home: https://github.com/intake/filesystem_spec

+httpx
+* Copyright (c) 2021 ProjectDiscovery, Inc.
+* License: MIT License
+* Source code: https://github.com/projectdiscovery/httpx
+* Project home: https://github.com/projectdiscovery/httpx
+
 geopandas
 * Copyright (c) 2013-2016, GeoPandas developers
 * License: BSD 3-Clause "New" or "Revised" License

@@ -391,6 +397,12 @@ tabulate
 * Source code: https://github.com/astanin/python-tabulate
 * Project home: https://github.com/astanin/python-tabulate

+tenacity
+* No listed copyright holder
+* License: Apache-2.0 license
+* Source code: https://github.com/jd/tenacity
+* Project home: https://github.com/jd/tenacity
+
 tensorflow
 * Copyright (c) 2023 Google, Inc.
 * License: Apache-2.0 license

ads/aqua/__init__.py

Lines changed: 4 additions & 3 deletions

@@ -1,12 +1,13 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2024 Oracle and/or its affiliates.
+# Copyright (c) 2024, 2025 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/


 import os
+from logging import getLogger

 from ads import logger, set_auth
+from ads.aqua.client.client import AsyncClient, Client
 from ads.aqua.common.utils import fetch_service_compartment
 from ads.config import OCI_RESOURCE_PRINCIPAL_VERSION


@@ -19,6 +20,7 @@ def get_logger_level():
     return level


+logger = getLogger(__name__)
 logger.setLevel(get_logger_level())


@@ -27,7 +29,6 @@ def set_log_level(log_level: str):

     log_level = log_level.upper()
     logger.setLevel(log_level.upper())
-    logger.handlers[0].setLevel(log_level)


 if OCI_RESOURCE_PRINCIPAL_VERSION:
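The hunks above replace the handler-based level adjustment with a plain module-level logger. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of it; the ADS_AQUA_LOG_LEVEL variable name and the INFO default are illustrative assumptions, not the repository's actual configuration.

```python
# Minimal sketch of the module-level logger pattern used in the diff above.
# The environment variable name and default level are illustrative assumptions.
import os
from logging import getLogger


def get_logger_level() -> str:
    # Fall back to INFO when the (hypothetical) env var is unset or invalid.
    level = os.environ.get("ADS_AQUA_LOG_LEVEL", "INFO").upper()
    return level if level in {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"} else "INFO"


logger = getLogger(__name__)
logger.setLevel(get_logger_level())


def set_log_level(log_level: str) -> None:
    # Adjust only the module logger at runtime; handler levels are left to the
    # root logging configuration, which is why the handler call was dropped above.
    logger.setLevel(log_level.upper())
```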
