Skip to content

Commit

Permalink
Renaming stuff
Browse files Browse the repository at this point in the history
  • Loading branch information
ahsimb committed Feb 5, 2024
1 parent e5fcd8b commit 91ecfe1
Show file tree
Hide file tree
Showing 29 changed files with 167 additions and 162 deletions.
5 changes: 5 additions & 0 deletions doc/changes/changes_0.2.0.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,10 @@ Version: 0.2.0
## Refactoring
* #160: Implemented the PM's recommendations of 2024-01-24.
* #120: Passing the secret store object (sb_config) as a parameter to all functions that need it.
* #167: Replacing the term "Docker-DB" with "Exasol Docker-DB" in all notebooks and documentation.
* #168: Renaming the section name "Access Configuration" to "Open Secure Configuration Storage".
* #170: Renaming the section name "Set up" to "Setup".
* #182: Renaming the secret store global variable from "sb_config" to "ai_lab_config".
* #169: Renaming the default database schema from "IDA" to "AI_LAB".

## Documentation
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
{
"cell_type": "markdown",
"source": [
"### Access configuration"
"### Open Secure Configuration Storage"
],
"metadata": {
"collapsed": false
Expand Down Expand Up @@ -154,9 +154,9 @@
")\n",
"\"\"\"\n",
"\n",
"with open_pyexasol_connection(sb_config) as conn:\n",
"with open_pyexasol_connection(ai_lab_config) as conn:\n",
" conn.execute(sql, query_params={\n",
" \"schema_name\": sb_config.db_schema,\n",
" \"schema_name\": ai_lab_config.db_schema,\n",
" \"table_name\": TABLE_NAME\n",
" })"
],
Expand Down Expand Up @@ -194,9 +194,9 @@
"S3_ACCESS_KEY = \"\"\n",
"S3_SECRET_KEY = \"\"\n",
"\n",
"with open_pyexasol_connection(sb_config) as conn:\n",
"with open_pyexasol_connection(ai_lab_config) as conn:\n",
" conn.execute(sql, query_params={\n",
" \"schema\": sb_config.db_schema,\n",
" \"schema\": ai_lab_config.db_schema,\n",
" \"access_key\": S3_ACCESS_KEY,\n",
" \"secret_key\": S3_SECRET_KEY,\n",
" })"
Expand Down Expand Up @@ -228,7 +228,7 @@
"outputs": [],
"source": [
"params = {\n",
" \"schema\": sb_config.db_schema,\n",
" \"schema\": ai_lab_config.db_schema,\n",
" \"table\": TABLE_NAME, \n",
"}\n",
"\n",
Expand All @@ -241,7 +241,7 @@
" CONNECTION_NAME = 'S3_CONNECTION';\n",
"\"\"\"\n",
"\n",
"with open_pyexasol_connection(sb_config) as conn:\n",
"with open_pyexasol_connection(ai_lab_config) as conn:\n",
" conn.execute(sql, query_params=params)"
],
"metadata": {
Expand All @@ -268,7 +268,7 @@
"execution_count": null,
"outputs": [],
"source": [
"with open_pyexasol_connection(sb_config) as conn:\n",
"with open_pyexasol_connection(ai_lab_config) as conn:\n",
" data_rows = conn.execute(\"select count(*) from {schema!i}.{table!i}\", query_params=params)\n",
" count = next(data_rows)[0] \n",
" print(f\"Loaded {count} rows\")\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@
"from exasol.connections import open_bucketfs_connection, open_pyexasol_connection\n",
"\n",
"jar_local_path = github.retrieve_jar(github.Project.CLOUD_STORAGE_EXTENSION, use_local_cache=True)\n",
"bfs_bucket = open_bucketfs_connection(sb_config)\n",
"bfs_bucket = open_bucketfs_connection(ai_lab_config)\n",
"bfs_path = bfs_utils.put_file(bfs_bucket, jar_local_path)\n",
"\n",
"with open_pyexasol_connection(sb_config) as conn:\n",
" cloud_storage.setup_scripts(conn, sb_config.db_schema, bfs_path)\n",
"with open_pyexasol_connection(ai_lab_config) as conn:\n",
" cloud_storage.setup_scripts(conn, ai_lab_config.db_schema, bfs_path)\n",
" \n",
"print(\"Cloud Storage Extension was initialized\")"
],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"\n",
"## Setup\n",
"\n",
"### Access configuration"
"### Open Secure Configuration Storage"
]
},
{
Expand Down Expand Up @@ -118,18 +118,18 @@
"stopwatch = Stopwatch()\n",
"\n",
"# Create an Exasol connection\n",
"with open_pyexasol_connection(sb_config, compression=True) as conn:\n",
"with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n",
"\n",
" # Create tables\n",
" sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n",
" sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n",
" conn.execute(query=sql)\n",
" sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{test_table}\" LIKE \"{sb_config.db_schema}\".\"{train_table}\"'\n",
" sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{test_table}\" LIKE \"{ai_lab_config.db_schema}\".\"{train_table}\"'\n",
" conn.execute(query=sql)\n",
"\n",
" # Import data into Exasol\n",
" conn.import_from_pandas(df_train, (sb_config.db_schema, train_table))\n",
" conn.import_from_pandas(df_train, (ai_lab_config.db_schema, train_table))\n",
" print(f\"Imported {conn.last_statement().rowcount()} rows into {train_table}.\")\n",
" conn.import_from_pandas(df_test, (sb_config.db_schema, test_table))\n",
" conn.import_from_pandas(df_test, (ai_lab_config.db_schema, test_table))\n",
" print(f\"Imported {conn.last_statement().rowcount()} rows into {test_table}.\")\n",
"\n",
"print(f\"Importing the data took: {stopwatch}\")"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"\n",
"## Setup\n",
"\n",
"### Access configuration"
"### Open Secure Configuration Storage"
]
},
{
Expand Down Expand Up @@ -120,18 +120,18 @@
"stopwatch = Stopwatch()\n",
"\n",
"# Create an Exasol connection\n",
"with open_pyexasol_connection(sb_config, compression=True) as conn:\n",
"with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n",
"\n",
" # Create tables\n",
" sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n",
" sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n",
" conn.execute(query=sql)\n",
" sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{test_table}\" LIKE \"{sb_config.db_schema}\".\"{train_table}\"'\n",
" sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{test_table}\" LIKE \"{ai_lab_config.db_schema}\".\"{train_table}\"'\n",
" conn.execute(query=sql)\n",
"\n",
" # Import data into Exasol\n",
" conn.import_from_pandas(df_train, (sb_config.db_schema, train_table))\n",
" conn.import_from_pandas(df_train, (ai_lab_config.db_schema, train_table))\n",
" print(f\"Imported {conn.last_statement().rowcount()} rows into {train_table}.\")\n",
" conn.import_from_pandas(df_test, (sb_config.db_schema, test_table))\n",
" conn.import_from_pandas(df_test, (ai_lab_config.db_schema, test_table))\n",
" print(f\"Imported {conn.last_statement().rowcount()} rows into {test_table}.\")\n",
"\n",
"print(f\"Importing the data took: {stopwatch}\")"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
"\n",
"Before starting working with any notebook in the AI-Lab one needs to connect to the configuration store providing the name of the file where the data is stored and the password. If the file with the specified name doesn't exist a new file will be created and protected with the entered password. However, if the file does exist it will be opened only if the provided password matches the password the file was created with.\n",
"\n",
"## Access configuration"
"## Open Secure Configuration Storage"
]
},
{
Expand Down Expand Up @@ -61,7 +61,7 @@
"source": [
"## Choose the database\n",
"\n",
"First, we need to choose which database we will work with. The AI-Lab includes a so-called Docker-DB. This is a single-node database running in a Docker container. It's a good choice of a database for any light-weight experiment designed as a proof of concept.\n",
"First, we need to choose which database we will work with. The AI-Lab includes a so-called Exasol Docker-DB. This is a single-node database running in a Docker container. It's a good choice of a database for any light-weight experiment designed as a proof of concept.\n",
"\n",
"For more practical work one might choose a customer database with real data.\n",
"\n",
Expand All @@ -75,7 +75,7 @@
"metadata": {},
"outputs": [],
"source": [
"display(get_db_selection_ui(sb_config))"
"display(get_db_selection_ui(ai_lab_config))"
]
},
{
Expand All @@ -95,17 +95,17 @@
"metadata": {},
"outputs": [],
"source": [
"display(get_db_config_ui(sb_config))"
"display(get_db_config_ui(ai_lab_config))"
]
},
{
"cell_type": "markdown",
"id": "c57ab0bf-8b56-433e-b8a8-55d8bd4ff931",
"metadata": {},
"source": [
"## Start the Docker-DB\n",
"## Start the Exasol Docker-DB\n",
"\n",
"If we choose to use the Docker-DB we need to launch it after every restart of the AI-Lab. This operation may take a few moments. Please wait till it is completed before proceeding with the next step.\n",
"If we choose to use the Exasol Docker-DB we need to launch it after every restart of the AI-Lab. This operation may take a few moments. Please wait till it is completed before proceeding with the next step.\n",
"\n",
"For a customer database, this operation will take no effect."
]
Expand All @@ -117,7 +117,7 @@
"metadata": {},
"outputs": [],
"source": [
"display(get_start_docker_db_ui(sb_config))"
"display(get_start_docker_db_ui(ai_lab_config))"
]
},
{
Expand All @@ -144,8 +144,8 @@
"\n",
"stopwatch = Stopwatch()\n",
"\n",
"sql = f'CREATE SCHEMA IF NOT EXISTS \"{sb_config.db_schema}\"'\n",
"with open_pyexasol_connection(sb_config, compression=True) as conn:\n",
"sql = f'CREATE SCHEMA IF NOT EXISTS \"{ai_lab_config.db_schema}\"'\n",
"with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n",
" conn.execute(query=sql)\n",
"\n",
"print(f\"Schema created in {stopwatch}\")"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
"\n",
"## Setup\n",
"\n",
"### Access configuration"
"### Open Secure Configuration Storage"
]
},
{
Expand Down Expand Up @@ -101,14 +101,14 @@
"outputs": [],
"source": [
"%%sql\n",
"EXECUTE SCRIPT {{sb_config.db_schema}}.\"SME_DEPLOY_SAGEMAKER_AUTOPILOT_ENDPOINT\"(\n",
" '{{sb_config.JOB_NAME}}', \n",
"EXECUTE SCRIPT {{ai_lab_config.db_schema}}.\"SME_DEPLOY_SAGEMAKER_AUTOPILOT_ENDPOINT\"(\n",
" '{{ai_lab_config.JOB_NAME}}', \n",
" '{{ENDPOINT_NAME}}', \n",
" '{{sb_config.db_schema}}',\n",
" '{{ai_lab_config.db_schema}}',\n",
" '{{INSTANCE_TYPE}}', \n",
" {{INSTANCE_COUNT}}, \n",
" '{{sb_config.sme_aws_connection}}', \n",
" '{{sb_config.aws_region}}'\n",
" '{{ai_lab_config.sme_aws_connection}}', \n",
" '{{ai_lab_config.aws_region}}'\n",
");"
]
},
Expand All @@ -130,7 +130,7 @@
"%%sql\n",
"SELECT SCRIPT_NAME, SCRIPT_TYPE \n",
"FROM SYS.EXA_ALL_SCRIPTS\n",
"WHERE SCRIPT_SCHEMA='{{sb_config.db_schema}}' AND SCRIPT_TYPE = 'UDF'"
"WHERE SCRIPT_SCHEMA='{{ai_lab_config.db_schema}}' AND SCRIPT_TYPE = 'UDF'"
]
},
{
Expand All @@ -155,7 +155,7 @@
"%%sql column_names <<\n",
"SELECT COLUMN_NAME\n",
"FROM SYS.EXA_ALL_COLUMNS\n",
"WHERE COLUMN_SCHEMA = '{{sb_config.db_schema}}' AND COLUMN_TABLE='{{TEST_TABLE_NAME}}' AND COLUMN_NAME <> UPPER('{{TARGET_COLUMN}}');"
"WHERE COLUMN_SCHEMA = '{{ai_lab_config.db_schema}}' AND COLUMN_TABLE='{{TEST_TABLE_NAME}}' AND COLUMN_NAME <> UPPER('{{TARGET_COLUMN}}');"
]
},
{
Expand Down Expand Up @@ -184,8 +184,8 @@
"outputs": [],
"source": [
"%%sql\n",
"SELECT \"{{sb_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(0, {{column_names}})\n",
"FROM \"{{sb_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n",
"SELECT \"{{ai_lab_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(0, {{column_names}})\n",
"FROM \"{{ai_lab_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n",
"LIMIT 10"
]
},
Expand All @@ -208,12 +208,12 @@
"WITH TEST_DATA AS\n",
"(\n",
" -- We take data from the test table and add the row number calling it SAMPLE_ID.\n",
" SELECT ROW_NUMBER() OVER () AS SAMPLE_ID, {{column_names}}, [{{TARGET_COLUMN}}] FROM \"{{sb_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n",
" SELECT ROW_NUMBER() OVER () AS SAMPLE_ID, {{column_names}}, [{{TARGET_COLUMN}}] FROM \"{{ai_lab_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n",
")\n",
"WITH MODEL_OUTPUT AS\n",
"(\n",
"    -- Make predictions. We will pass the SAMPLE_ID that should be returned back unchanged.\n",
" SELECT \"{{sb_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(SAMPLE_ID, {{column_names}})\n",
" SELECT \"{{ai_lab_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(SAMPLE_ID, {{column_names}})\n",
" FROM TEST_DATA\n",
")\n",
"-- Finally, compute the confusion matrix.\n",
Expand Down Expand Up @@ -242,8 +242,8 @@
"%%sql\n",
"EXECUTE SCRIPT SME_DELETE_SAGEMAKER_AUTOPILOT_ENDPOINT(\n",
" '{{ENDPOINT_NAME}}', \n",
" '{{sb_config.sme_aws_connection}}', \n",
" '{{sb_config.aws_region}}'\n",
" '{{ai_lab_config.sme_aws_connection}}', \n",
" '{{ai_lab_config.aws_region}}'\n",
")"
]
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"\n",
"## Setup\n",
"\n",
"### Access configuration"
"### Open Secure Configuration Storage"
]
},
{
Expand Down Expand Up @@ -58,7 +58,7 @@
"outputs": [],
"source": [
"%run utils/sme_init_ui.ipynb\n",
"display(get_sme_config_ui(sb_config))"
"display(get_sme_config_ui(ai_lab_config))"
]
},
{
Expand All @@ -82,7 +82,7 @@
"source": [
"from exasol.sagemaker_extension_wrapper import initialize_sme_extension\n",
"\n",
"initialize_sme_extension(sb_config)"
"initialize_sme_extension(ai_lab_config)"
]
},
{
Expand Down
Loading

0 comments on commit 91ecfe1

Please sign in to comment.