Task 2. Create minimal training and validation data
import os, json, math, shutil
import numpy as np
import tensorflow as tf
# environment variables used by bash cells
PROJECT = !gcloud config get-value project
PROJECT = PROJECT[0]
REGION = 'us-central1'
BUCKET='{}-dsongcp'.format(PROJECT)
os.environ['ENDPOINT_NAME'] = 'flights'
os.environ['BUCKET'] = BUCKET
os.environ['REGION'] = REGION
os.environ['TF_VERSION'] = '2-' + tf.__version__[2:3]  # e.g. '2.6.0' -> '2-6' (assumes a single-digit minor version)
%%bigquery
CREATE OR REPLACE TABLE dsongcp.flights_train_data AS
SELECT
IF(arr_delay < 15, 1.0, 0.0) AS ontime,
dep_delay,
taxi_out,
distance,
origin,
dest,
EXTRACT(hour FROM dep_time) AS dep_hour,
IF (EXTRACT(dayofweek FROM dep_time) BETWEEN 2 AND 6, 1, 0) AS is_weekday,
UNIQUE_CARRIER AS carrier,
dep_airport_lat,
dep_airport_lon,
arr_airport_lat,
arr_airport_lon
FROM dsongcp.flights_tzcorr f
JOIN dsongcp.trainday t
ON f.FL_DATE = t.FL_DATE
WHERE
f.CANCELLED = False AND
f.DIVERTED = False AND
is_train_day = 'True'
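Before moving on, it is worth a quick sanity check on the new table: row count and label balance. A minimal sketch using the BigQuery Python client (assuming the google-cloud-bigquery library is available in this notebook environment):

# Sanity-check sketch: row count and on-time fraction of the training table.
from google.cloud import bigquery

bq = bigquery.Client(project=PROJECT)
stats = bq.query('''
    SELECT COUNT(*) AS num_rows, AVG(ontime) AS ontime_fraction
    FROM dsongcp.flights_train_data
''').to_dataframe()
print(stats)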
%%bigquery
CREATE OR REPLACE TABLE dsongcp.flights_eval_data AS
SELECT
IF(arr_delay < 15, 1.0, 0.0) AS ontime,
dep_delay,
taxi_out,
distance,
origin,
dest,
EXTRACT(hour FROM dep_time) AS dep_hour,
IF (EXTRACT(dayofweek FROM dep_time) BETWEEN 2 AND 6, 1, 0) AS is_weekday,
UNIQUE_CARRIER AS carrier,
dep_airport_lat,
dep_airport_lon,
arr_airport_lat,
arr_airport_lon
FROM dsongcp.flights_tzcorr f
JOIN dsongcp.trainday t
ON f.FL_DATE = t.FL_DATE
WHERE
f.CANCELLED = False AND
f.DIVERTED = False AND
is_train_day = 'False'
%%bigquery
CREATE OR REPLACE TABLE dsongcp.flights_all_data AS
SELECT
IF(arr_delay < 15, 1.0, 0.0) AS ontime,
dep_delay,
taxi_out,
distance,
origin,
dest,
EXTRACT(hour FROM dep_time) AS dep_hour,
IF (EXTRACT(dayofweek FROM dep_time) BETWEEN 2 AND 6, 1, 0) AS is_weekday,
UNIQUE_CARRIER AS carrier,
dep_airport_lat,
dep_airport_lon,
arr_airport_lat,
arr_airport_lon,
IF (is_train_day = 'True',
    IF(ABS(MOD(FARM_FINGERPRINT(CAST(f.FL_DATE AS STRING)), 100)) < 60, 'TRAIN', 'VALIDATE'),
    'TEST') AS data_split
FROM dsongcp.flights_tzcorr f
JOIN dsongcp.trainday t
ON f.FL_DATE = t.FL_DATE
WHERE
f.CANCELLED = False AND
f.DIVERTED = False
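Hashing FL_DATE with FARM_FINGERPRINT makes the TRAIN/VALIDATE split deterministic: every row from a given day lands in the same partition on every run, with roughly 60% of train days going to TRAIN. A minimal Python sketch of the same idea, with hashlib.md5 standing in for BigQuery's FARM_FINGERPRINT:

# Illustration only: deterministic hash-based splitting.
import hashlib

def data_split(fl_date: str) -> str:
    bucket = int(hashlib.md5(fl_date.encode()).hexdigest(), 16) % 100
    return 'TRAIN' if bucket < 60 else 'VALIDATE'

print(data_split('2015-03-14'))  # the same date always maps to the same split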
%%bash
PROJECT=$(gcloud config get-value project)
for dataset in "train" "eval" "all"; do
TABLE=dsongcp.flights_${dataset}_data
CSV=gs://${BUCKET}/ch9/data/${dataset}.csv
echo "Exporting ${TABLE} to ${CSV} and deleting table"
bq --project_id=${PROJECT} extract --destination_format=CSV $TABLE $CSV
bq --project_id=${PROJECT} rm -f $TABLE
done
Exporting dsongcp.flights_train_data to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/train.csv and deleting table
Waiting on bqjob_r3cec2379bff62eb2_0000018a39f00313_1 ... (65s) Current status: DONE
Exporting dsongcp.flights_eval_data to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/eval.csv and deleting table
Waiting on bqjob_r420cc6b3f363ef6b_0000018a39f11292_1 ... (22s) Current status: DONE
Exporting dsongcp.flights_all_data to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/all.csv and deleting table
Waiting on bqjob_r52d369f68dd495ef_0000018a39f17c13_1 ... (85s) Current status: DONE
!gsutil ls -lh gs://{BUCKET}/ch9/data
445.01 MiB 2023-08-28T02:21:55Z gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/all.csv
115.19 MiB 2023-08-28T02:20:34Z gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/eval.csv
296.96 MiB 2023-08-28T02:19:57Z gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/data/train.csv
TOTAL: 3 objects, 898801258 bytes (857.16 MiB)
DEVELOP_MODE = True
NUM_EXAMPLES = 5000 * 1000  # assumed training-set size; used to compute steps_per_epoch for full training
training_data_uri = 'gs://{}/ch9/data/train*'.format(BUCKET)
validation_data_uri = 'gs://{}/ch9/data/eval*'.format(BUCKET)
NBUCKETS = 5
NEMBEDS = 3
TRAIN_BATCH_SIZE = 64
DNN_HIDDEN_UNITS = '64,32'
if DEVELOP_MODE:
    train_df = tf.data.experimental.make_csv_dataset(training_data_uri, batch_size=5)
    for n, data in enumerate(train_df):
        numpy_data = {k: v.numpy() for k, v in data.items()}
        print(n, numpy_data)
        if n == 1: break
2023-08-28 02:23:28.059312: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/local/cuda/lib64:/usr/local/nccl2/lib:/usr/local/cuda/extras/CUPTI/lib64
2023-08-28 02:23:28.059366: W tensorflow/stream_executor/cuda/cuda_driver.cc:269] failed call to cuInit: UNKNOWN ERROR (303)
2023-08-28 02:23:28.059407: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (tensorflow): /proc/driver/nvidia/version does not exist
2023-08-28 02:23:28.062878: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2023-08-28 02:23:29.082322: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:185] None of the MLIR Optimization Passes are enabled (registered 2)
0 {'ontime': array([1, 0, 1, 1, 0], dtype=int32), 'dep_delay': array([ 4, 42, 0, -3, 32], dtype=int32), 'taxi_out': array([18, 65, 12, 6, 13], dtype=int32), 'distance': array([ 642, 2419, 1616, 479, 290], dtype=int32), 'origin': array([b'EWR', b'IAD', b'DFW', b'SMF', b'SLC'], dtype=object), 'dest': array([b'SDF', b'SFO', b'PDX', b'PDX', b'BOI'], dtype=object), 'dep_hour': array([ 1, 19, 1, 5, 4], dtype=int32), 'is_weekday': array([0, 1, 0, 0, 1], dtype=int32), 'carrier': array([b'EV', b'UA', b'AA', b'WN', b'DL'], dtype=object), 'dep_airport_lat': array([40.6925 , 38.9475 , 32.89722 , 38.695557, 40.788334],
dtype=float32), 'dep_airport_lon': array([ -74.16861 , -77.46 , -97.03778 , -121.590836, -111.977776],
dtype=float32), 'arr_airport_lat': array([38.174168, 37.61889 , 45.58861 , 45.58861 , 43.564445],
dtype=float32), 'arr_airport_lon': array([ -85.73639, -122.375 , -122.59695, -122.59695, -116.22278],
dtype=float32)}
1 {'ontime': array([1, 1, 0, 1, 1], dtype=int32), 'dep_delay': array([-3, -4, 15, -9, -2], dtype=int32), 'taxi_out': array([12, 22, 10, 19, 28], dtype=int32), 'distance': array([806, 771, 569, 550, 157], dtype=int32), 'origin': array([b'LAS', b'IAH', b'SJC', b'SFO', b'ORD'], dtype=object), 'dest': array([b'GEG', b'TYS', b'PDX', b'PDX', b'FWA'], dtype=object), 'dep_hour': array([17, 17, 14, 16, 21], dtype=int32), 'is_weekday': array([0, 0, 0, 1, 1], dtype=int32), 'carrier': array([b'WN', b'EV', b'AS', b'AS', b'MQ'], dtype=object), 'dep_airport_lat': array([36.08 , 29.984444, 37.362778, 37.61889 , 41.979443],
dtype=float32), 'dep_airport_lon': array([-115.15222, -95.34139, -121.92917, -122.375 , -87.9075 ],
dtype=float32), 'arr_airport_lat': array([47.619167, 35.810833, 45.58861 , 45.58861 , 40.978333],
dtype=float32), 'arr_airport_lon': array([-117.53528 , -83.99416 , -122.59695 , -122.59695 , -85.195274],
dtype=float32)}
def features_and_labels(features):
    label = features.pop('ontime')  # separate the label; everything else is a feature
    return features, label

def read_dataset(pattern, batch_size, mode=tf.estimator.ModeKeys.TRAIN, truncate=None):
    dataset = tf.data.experimental.make_csv_dataset(pattern, batch_size, num_epochs=1)
    dataset = dataset.map(features_and_labels)
    if mode == tf.estimator.ModeKeys.TRAIN:
        # shuffle and repeat only during training; evaluation reads the data once
        dataset = dataset.shuffle(batch_size * 10)
        dataset = dataset.repeat()
    dataset = dataset.prefetch(1)
    if truncate is not None:
        dataset = dataset.take(truncate)
    return dataset
if DEVELOP_MODE:
    print("Checking input pipeline")
    one_item = read_dataset(training_data_uri, batch_size=2, truncate=1)
    print(list(one_item))  # should print one batch of 2 items
Checking input pipeline
[(OrderedDict([('dep_delay', <tf.Tensor: shape=(2,), dtype=int32, numpy=array([-2, 21], dtype=int32)>), ('taxi_out', <tf.Tensor: shape=(2,), dtype=int32, numpy=array([22, 12], dtype=int32)>), ('distance', <tf.Tensor: shape=(2,), dtype=int32, numpy=array([481, 763], dtype=int32)>), ('origin', <tf.Tensor: shape=(2,), dtype=string, numpy=array([b'ATL', b'LAS'], dtype=object)>), ('dest', <tf.Tensor: shape=(2,), dtype=string, numpy=array([b'RIC', b'PDX'], dtype=object)>), ('dep_hour', <tf.Tensor: shape=(2,), dtype=int32, numpy=array([18, 0], dtype=int32)>), ('is_weekday', <tf.Tensor: shape=(2,), dtype=int32, numpy=array([1, 1], dtype=int32)>), ('carrier', <tf.Tensor: shape=(2,), dtype=string, numpy=array([b'DL', b'WN'], dtype=object)>), ('dep_airport_lat', <tf.Tensor: shape=(2,), dtype=float32, numpy=array([33.636665, 36.08 ], dtype=float32)>), ('dep_airport_lon', <tf.Tensor: shape=(2,), dtype=float32, numpy=array([ -84.42778, -115.15222], dtype=float32)>), ('arr_airport_lat', <tf.Tensor: shape=(2,), dtype=float32, numpy=array([37.50528, 45.58861], dtype=float32)>), ('arr_airport_lon', <tf.Tensor: shape=(2,), dtype=float32, numpy=array([ -77.319725, -122.59695 ], dtype=float32)>)]), <tf.Tensor: shape=(2,), dtype=int32, numpy=array([1, 1], dtype=int32)>)]
Task 4. Create, train and evaluate TensorFlow model
import tensorflow as tf
real = {
    colname: tf.feature_column.numeric_column(colname)
    for colname in (
        'dep_delay,taxi_out,distance,dep_hour,is_weekday,' +
        'dep_airport_lat,dep_airport_lon,' +
        'arr_airport_lat,arr_airport_lon'
    ).split(',')
}
sparse = {
    'carrier': tf.feature_column.categorical_column_with_vocabulary_list('carrier',
                   vocabulary_list='AS,VX,F9,UA,US,WN,HA,EV,MQ,DL,OO,B6,NK,AA'.split(',')),
    'origin': tf.feature_column.categorical_column_with_hash_bucket('origin', hash_bucket_size=1000),
    'dest': tf.feature_column.categorical_column_with_hash_bucket('dest', hash_bucket_size=1000),
}
inputs = {
    colname: tf.keras.layers.Input(name=colname, shape=(), dtype='float32')
    for colname in real.keys()
}
inputs.update({
    colname: tf.keras.layers.Input(name=colname, shape=(), dtype='string')
    for colname in sparse.keys()
})
Bucketing
latbuckets = np.linspace(20.0, 50.0, NBUCKETS).tolist()     # USA
lonbuckets = np.linspace(-120.0, -70.0, NBUCKETS).tolist()  # USA
disc = {}
disc.update({
    'd_{}'.format(key): tf.feature_column.bucketized_column(real[key], latbuckets)
    for key in ['dep_airport_lat', 'arr_airport_lat']
})
disc.update({
    'd_{}'.format(key): tf.feature_column.bucketized_column(real[key], lonbuckets)
    for key in ['dep_airport_lon', 'arr_airport_lon']
})
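What bucketized_column does to a raw coordinate can be checked with numpy; np.digitize applies the same boundary logic (a sketch using the latbuckets defined above):

# Sketch: discretizing latitudes against latbuckets, as bucketized_column will.
sample_lats = np.array([21.97, 38.69, 47.62])
print(np.digitize(sample_lats, latbuckets))  # bucket index per value, in 0..NBUCKETS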
# cross columns that make sense in combination
sparse['dep_loc'] = tf.feature_column.crossed_column(
    [disc['d_dep_airport_lat'], disc['d_dep_airport_lon']], NBUCKETS * NBUCKETS)
sparse['arr_loc'] = tf.feature_column.crossed_column(
    [disc['d_arr_airport_lat'], disc['d_arr_airport_lon']], NBUCKETS * NBUCKETS)
sparse['dep_arr'] = tf.feature_column.crossed_column([sparse['dep_loc'], sparse['arr_loc']], NBUCKETS ** 4)

# embed all the sparse columns
embed = {
    'embed_{}'.format(colname): tf.feature_column.embedding_column(col, NEMBEDS)
    for colname, col in sparse.items()
}
real.update(embed)

# one-hot encode the sparse columns
sparse = {
    colname: tf.feature_column.indicator_column(col)
    for colname, col in sparse.items()
}
if DEVELOP_MODE:
    print(sparse.keys())
    print(real.keys())
dict_keys(['carrier', 'origin', 'dest', 'dep_loc', 'arr_loc', 'dep_arr'])
dict_keys(['dep_delay', 'taxi_out', 'distance', 'dep_hour', 'is_weekday', 'dep_airport_lat', 'dep_airport_lon', 'arr_airport_lat', 'arr_airport_lon', 'embed_carrier', 'embed_origin', 'embed_dest', 'embed_dep_loc', 'embed_arr_loc', 'embed_dep_arr'])
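A quick wiring check: pushing one batch through a DenseFeatures layer built from the embedding columns should give NEMBEDS dense values per sparse feature, so with six sparse columns and NEMBEDS = 3, a (2, 18) tensor for a batch of two. A sketch for develop mode:

# Sketch: confirm each sparse column embeds into NEMBEDS dimensions.
if DEVELOP_MODE:
    features, _ = next(iter(read_dataset(training_data_uri, batch_size=2, truncate=1)))
    demo = tf.keras.layers.DenseFeatures(list(embed.values()))(features)
    print(demo.shape)  # expect (2, 6 * NEMBEDS)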
Train and evaluate the model
output_dir='gs://{}/ch9/trained_model'.format(BUCKET)
os.environ['OUTDIR'] = output_dir # needed for deployment
print('Writing trained model to {}'.format(output_dir))
Writing trained model to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model
!gsutil -m rm -rf $OUTDIR
CommandException: 1 files/objects could not be removed.
# Build a wide-and-deep model.
def wide_and_deep_classifier(inputs, linear_feature_columns, dnn_feature_columns, dnn_hidden_units):
    deep = tf.keras.layers.DenseFeatures(dnn_feature_columns, name='deep_inputs')(inputs)
    layers = [int(x) for x in dnn_hidden_units.split(',')]
    for layerno, numnodes in enumerate(layers):
        deep = tf.keras.layers.Dense(numnodes, activation='relu', name='dnn_{}'.format(layerno + 1))(deep)
    wide = tf.keras.layers.DenseFeatures(linear_feature_columns, name='wide_inputs')(inputs)
    both = tf.keras.layers.concatenate([deep, wide], name='both')
    output = tf.keras.layers.Dense(1, activation='sigmoid', name='pred')(both)
    model = tf.keras.Model(inputs, output)
    model.compile(optimizer='adam',
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model

model = wide_and_deep_classifier(
    inputs,
    linear_feature_columns=sparse.values(),
    dnn_feature_columns=real.values(),
    dnn_hidden_units=DNN_HIDDEN_UNITS)
tf.keras.utils.plot_model(model, 'flights_model.png', show_shapes=False, rankdir='LR')
('You must install pydot (`pip install pydot`) and install graphviz (see instructions at https://graphviz.gitlab.io/download/) ', 'for plot_model/model_to_dot to work.')
# training and evaluation dataset
train_batch_size = TRAIN_BATCH_SIZE
if DEVELOP_MODE:
    eval_batch_size = 100
    steps_per_epoch = 3
    epochs = 2
    num_eval_examples = eval_batch_size * 10
else:
    eval_batch_size = 100
    steps_per_epoch = NUM_EXAMPLES // train_batch_size
    epochs = 10
    num_eval_examples = eval_batch_size * 100
train_dataset = read_dataset(training_data_uri, train_batch_size)
eval_dataset = read_dataset(validation_data_uri, eval_batch_size, tf.estimator.ModeKeys.EVAL, num_eval_examples)

checkpoint_path = '{}/checkpoints/flights.cpt'.format(output_dir)
shutil.rmtree(checkpoint_path, ignore_errors=True)
cp_callback = tf.keras.callbacks.ModelCheckpoint(checkpoint_path,
                                                 save_weights_only=True,
                                                 verbose=1)

history = model.fit(train_dataset,
                    validation_data=eval_dataset,
                    epochs=epochs,
                    steps_per_epoch=steps_per_epoch,
                    callbacks=[cp_callback])
Epoch 1/2
3/3 [==============================] - 12s 3s/step - loss: 2.7928 - accuracy: 0.2240 - val_loss: 0.6199 - val_accuracy: 0.7789
Epoch 00001: saving model to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/checkpoints/flights.cpt
Epoch 2/2
3/3 [==============================] - 10s 5s/step - loss: 0.3926 - accuracy: 0.8750 - val_loss: 1.3856 - val_accuracy: 0.8171
Epoch 00002: saving model to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/checkpoints/flights.cpt
import matplotlib.pyplot as plt

nrows = 1
ncols = 2
fig = plt.figure(figsize=(10, 5))
for idx, key in enumerate(['loss', 'accuracy']):
    ax = fig.add_subplot(nrows, ncols, idx + 1)
    plt.plot(history.history[key])
    plt.plot(history.history['val_{}'.format(key)])
    plt.title('model {}'.format(key))
    plt.ylabel(key)
    plt.xlabel('epoch')
    plt.legend(['train', 'validation'], loc='upper left');

Export the trained model
import time
export_dir = '{}/export/flights_{}'.format(output_dir, time.strftime("%Y%m%d-%H%M%S"))
print('Exporting to {}'.format(export_dir))
tf.saved_model.save(model, export_dir)
Exporting to gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/export/flights_20230828-022634
2023-08-28 02:26:37.688871: W tensorflow/python/util/util.cc:348] Sets are not currently considered sequences, but this may change in the future, so consider avoiding using them.
INFO:tensorflow:Assets written to: gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/export/flights_20230828-022634/assets
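Before deploying, the export can be smoke-tested locally by loading it back and calling the serving signature directly; a sketch, with input names and dtypes matching the signature shown in Task 6:

# Sketch: local smoke test of the exported SavedModel.
loaded = tf.saved_model.load(export_dir)
infer = loaded.signatures['serving_default']
pred = infer(
    dep_delay=tf.constant([40.0]), taxi_out=tf.constant([17.0]),
    distance=tf.constant([41.0]), dep_hour=tf.constant([2.0]),
    is_weekday=tf.constant([1.0]),
    dep_airport_lat=tf.constant([58.42527778]), dep_airport_lon=tf.constant([-135.7075]),
    arr_airport_lat=tf.constant([58.35472222]), arr_airport_lon=tf.constant([-134.57472222]),
    carrier=tf.constant(['AS']), origin=tf.constant(['GST']), dest=tf.constant(['JNU']))
print(pred['pred'].numpy())  # probability of an on-time arrival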
Task 5. Deploy flights model to Vertex AI
%%bash
# note TF_VERSION and ENDPOINT_NAME set in 1st cell
# TF_VERSION=2-6
# ENDPOINT_NAME=flights
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
MODEL_NAME=${ENDPOINT_NAME}-${TIMESTAMP}
EXPORT_PATH=$(gsutil ls ${OUTDIR}/export | tail -1)
echo $EXPORT_PATH
# create the model endpoint for deploying the model
if [[ $(gcloud beta ai endpoints list --region=$REGION \
--format='value(DISPLAY_NAME)' --filter=display_name=${ENDPOINT_NAME}) ]]; then
echo "Endpoint for $MODEL_NAME already exists"
else
echo "Creating Endpoint for $MODEL_NAME"
gcloud beta ai endpoints create --region=${REGION} --display-name=${ENDPOINT_NAME}
fi
ENDPOINT_ID=$(gcloud beta ai endpoints list --region=$REGION \
--format='value(ENDPOINT_ID)' --filter=display_name=${ENDPOINT_NAME})
echo "ENDPOINT_ID=$ENDPOINT_ID"
# delete any existing models with this name
for MODEL_ID in $(gcloud beta ai models list --region=$REGION --format='value(MODEL_ID)' --filter=display_name=${MODEL_NAME}); do
echo "Deleting existing $MODEL_NAME ... $MODEL_ID "
gcloud ai models delete --region=$REGION $MODEL_ID
done
# create the model using the parameters: docker container image and artifact URI
gcloud beta ai models upload --region=$REGION --display-name=$MODEL_NAME \
--container-image-uri=us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.${TF_VERSION}:latest \
--artifact-uri=$EXPORT_PATH
MODEL_ID=$(gcloud beta ai models list --region=$REGION --format='value(MODEL_ID)' --filter=display_name=${MODEL_NAME})
echo "MODEL_ID=$MODEL_ID"
# deploy the model to the endpoint
gcloud beta ai endpoints deploy-model $ENDPOINT_ID \
--region=$REGION \
--model=$MODEL_ID \
--display-name=$MODEL_NAME \
--machine-type=n1-standard-2 \
--min-replica-count=1 \
--max-replica-count=1 \
--traffic-split=0=100
gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/export/flights_20230828-022634/
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
WARNING: The following filter keys were not present in any resource : display_name
Creating Endpoint for flights-20230828-022709
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [6599843490895495168]...done.
Created Vertex AI endpoint: projects/381610902297/locations/us-central1/endpoints/6847584694951739392.
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
ENDPOINT_ID=6847584694951739392
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
WARNING: The following filter keys were not present in any resource : display_name
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [5026257633594572800]...
............................................................................................done.
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
MODEL_ID=7306885886245863424
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [8493607134204788736]...done.
Deployed a model to the endpoint 6847584694951739392. Id of the deployed model: 9196994752904429568.
%%writefile example_input.json
{"instances": [
{"dep_hour": 2, "is_weekday": 1, "dep_delay": 40, "taxi_out": 17, "distance": 41, "carrier": "AS", "dep_airport_lat": 58.42527778, "dep_airport_lon": -135.7075, "arr_airport_lat": 58.35472222, "arr_airport_lon": -134.57472222, "origin": "GST", "dest": "JNU"},
{"dep_hour": 22, "is_weekday": 0, "dep_delay": -7, "taxi_out": 7, "distance": 201, "carrier": "HA", "dep_airport_lat": 21.97611111, "dep_airport_lon": -159.33888889, "arr_airport_lat": 20.89861111, "arr_airport_lon": -156.43055556, "origin": "LIH", "dest": "OGG"}
]}
Writing example_input.json
%%bash
ENDPOINT_ID=$(gcloud beta ai endpoints list --region=$REGION \
--format='value(ENDPOINT_ID)' --filter=display_name=${ENDPOINT_NAME})
echo $ENDPOINT_ID
gcloud beta ai endpoints predict $ENDPOINT_ID --region=$REGION --json-request=example_input.json
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
6847584694951739392
Using endpoint [https://us-central1-prediction-aiplatform.googleapis.com/]
[[0.59375304], [0.909939647]]
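The same call can be made from Python with the Vertex AI SDK instead of gcloud; a sketch, assuming the google-cloud-aiplatform library is installed and substituting the endpoint ID printed above:

# Sketch: online prediction via the Vertex AI Python SDK.
from google.cloud import aiplatform

aiplatform.init(project=PROJECT, location=REGION)
endpoint = aiplatform.Endpoint('6847584694951739392')  # endpoint ID printed above
instance = {
    "dep_hour": 2, "is_weekday": 1, "dep_delay": 40, "taxi_out": 17,
    "distance": 41, "carrier": "AS", "origin": "GST", "dest": "JNU",
    "dep_airport_lat": 58.42527778, "dep_airport_lon": -135.7075,
    "arr_airport_lat": 58.35472222, "arr_airport_lon": -134.57472222}
print(endpoint.predict(instances=[instance]).predictions)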
%%bash
PROJECT=$(gcloud config get-value project)
ENDPOINT_ID=$(gcloud beta ai endpoints list --region=$REGION \
--format='value(ENDPOINT_ID)' --filter=display_name=${ENDPOINT_NAME})
curl -X POST \
-H "Authorization: Bearer "$(gcloud auth application-default print-access-token) \
-H "Content-Type: application/json; charset=utf-8" \
-d @example_input.json \
"https://${REGION}-aiplatform.googleapis.com/v1/projects/${PROJECT}/locations/${REGION}/endpoints/${ENDPOINT_ID}:predict"
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
{
"predictions": [
[
0.59375304
],
[
0.909939647
]
],
"deployedModelId": "9196994752904429568",
"model": "projects/381610902297/locations/us-central1/models/7306885886245863424",
"modelDisplayName": "flights-20230828-022709",
"modelVersionId": "1"
}
Task 6. Model explainability
%%bash
model_dir=$(gsutil ls ${OUTDIR}/export | tail -1)
echo $model_dir
saved_model_cli show --tag_set serve --signature_def serving_default --dir $model_dir
gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/export/flights_20230828-022634/
The given SavedModel SignatureDef contains the following input(s):
inputs['arr_airport_lat'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_arr_airport_lat:0
inputs['arr_airport_lon'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_arr_airport_lon:0
inputs['carrier'] tensor_info:
dtype: DT_STRING
shape: (-1)
name: serving_default_carrier:0
inputs['dep_airport_lat'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_dep_airport_lat:0
inputs['dep_airport_lon'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_dep_airport_lon:0
inputs['dep_delay'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_dep_delay:0
inputs['dep_hour'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_dep_hour:0
inputs['dest'] tensor_info:
dtype: DT_STRING
shape: (-1)
name: serving_default_dest:0
inputs['distance'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_distance:0
inputs['is_weekday'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_is_weekday:0
inputs['origin'] tensor_info:
dtype: DT_STRING
shape: (-1)
name: serving_default_origin:0
inputs['taxi_out'] tensor_info:
dtype: DT_FLOAT
shape: (-1)
name: serving_default_taxi_out:0
The given SavedModel SignatureDef contains the following output(s):
outputs['pred'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: StatefulPartitionedCall_2:0
Method name is: tensorflow/serving/predict
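The explanation metadata below must name each serving input exactly; those names can also be pulled from the SavedModel programmatically rather than retyped (a sketch, using the export_dir from the export step):

# Sketch: list the serving signature's inputs programmatically.
loaded = tf.saved_model.load(export_dir)
sig = loaded.signatures['serving_default']
print(sorted(sig.structured_input_signature[1].keys()))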
cols = ('dep_delay,taxi_out,distance,dep_hour,is_weekday,' +
        'dep_airport_lat,dep_airport_lon,' +
        'arr_airport_lat,arr_airport_lon,' +
        'carrier,origin,dest')

inputs = {x: {"inputTensorName": "{}".format(x)}
          for x in cols.split(',')}

expl = {
    "inputs": inputs,
    "outputs": {
        "pred": {
            "outputTensorName": "pred"
        }
    }
}
print(expl)
with open('explanation-metadata.json', 'w') as ofp:
    json.dump(expl, ofp, indent=2)
{'inputs': {'dep_delay': {'inputTensorName': 'dep_delay'}, 'taxi_out': {'inputTensorName': 'taxi_out'}, 'distance': {'inputTensorName': 'distance'}, 'dep_hour': {'inputTensorName': 'dep_hour'}, 'is_weekday': {'inputTensorName': 'is_weekday'}, 'dep_airport_lat': {'inputTensorName': 'dep_airport_lat'}, 'dep_airport_lon': {'inputTensorName': 'dep_airport_lon'}, 'arr_airport_lat': {'inputTensorName': 'arr_airport_lat'}, 'arr_airport_lon': {'inputTensorName': 'arr_airport_lon'}, 'carrier': {'inputTensorName': 'carrier'}, 'origin': {'inputTensorName': 'origin'}, 'dest': {'inputTensorName': 'dest'}}, 'outputs': {'pred': {'outputTensorName': 'pred'}}}
!cat explanation-metadata.json
{
"inputs": {
"dep_delay": {
"inputTensorName": "dep_delay"
},
"taxi_out": {
"inputTensorName": "taxi_out"
},
"distance": {
"inputTensorName": "distance"
},
"dep_hour": {
"inputTensorName": "dep_hour"
},
"is_weekday": {
"inputTensorName": "is_weekday"
},
"dep_airport_lat": {
"inputTensorName": "dep_airport_lat"
},
"dep_airport_lon": {
"inputTensorName": "dep_airport_lon"
},
"arr_airport_lat": {
"inputTensorName": "arr_airport_lat"
},
"arr_airport_lon": {
"inputTensorName": "arr_airport_lon"
},
"carrier": {
"inputTensorName": "carrier"
},
"origin": {
"inputTensorName": "origin"
},
"dest": {
"inputTensorName": "dest"
}
},
"outputs": {
"pred": {
"outputTensorName": "pred"
}
}
}
Create and deploy another model flights_xai to Vertex AI
%%bash
# note TF_VERSION set in 1st cell, but ENDPOINT_NAME is being changed
# TF_VERSION=2-6
ENDPOINT_NAME=flights_xai
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
MODEL_NAME=${ENDPOINT_NAME}-${TIMESTAMP}
EXPORT_PATH=$(gsutil ls ${OUTDIR}/export | tail -1)
echo $EXPORT_PATH
# create the model endpoint for deploying the model
if [[ $(gcloud beta ai endpoints list --region=$REGION \
--format='value(DISPLAY_NAME)' --filter=display_name=${ENDPOINT_NAME}) ]]; then
echo "Endpoint for $MODEL_NAME already exists"
else
# create model endpoint
echo "Creating Endpoint for $MODEL_NAME"
gcloud beta ai endpoints create --region=${REGION} --display-name=${ENDPOINT_NAME}
fi
ENDPOINT_ID=$(gcloud beta ai endpoints list --region=$REGION \
--format='value(ENDPOINT_ID)' --filter=display_name=${ENDPOINT_NAME})
echo "ENDPOINT_ID=$ENDPOINT_ID"
# delete any existing models with this name
for MODEL_ID in $(gcloud beta ai models list --region=$REGION --format='value(MODEL_ID)' --filter=display_name=${MODEL_NAME}); do
echo "Deleting existing $MODEL_NAME ... $MODEL_ID "
gcloud ai models delete --region=$REGION $MODEL_ID
done
# upload the model using the parameters: docker container image, artifact URI, explanation method,
# explanation path count, and the explanation metadata JSON file `explanation-metadata.json`.
# Here, the number of feature permutations used to approximate the Shapley values is kept at `10`.
gcloud beta ai models upload --region=$REGION --display-name=$MODEL_NAME \
--container-image-uri=us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.${TF_VERSION}:latest \
--artifact-uri=$EXPORT_PATH \
--explanation-method=sampled-shapley --explanation-path-count=10 --explanation-metadata-file=explanation-metadata.json
MODEL_ID=$(gcloud beta ai models list --region=$REGION --format='value(MODEL_ID)' --filter=display_name=${MODEL_NAME})
echo "MODEL_ID=$MODEL_ID"
# deploy the model to the endpoint
gcloud beta ai endpoints deploy-model $ENDPOINT_ID \
--region=$REGION \
--model=$MODEL_ID \
--display-name=$MODEL_NAME \
--machine-type=n1-standard-2 \
--min-replica-count=1 \
--max-replica-count=1 \
--traffic-split=0=100
gs://qwiklabs-gcp-03-e5040853c2d2-dsongcp/ch9/trained_model/export/flights_20230828-022634/
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Creating Endpoint for flights_xai-20230828-024709
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [8899353313133199360]...
.....................done.
Created Vertex AI endpoint: projects/381610902297/locations/us-central1/endpoints/3179402798458470400.
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
ENDPOINT_ID=3179402798458470400
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [5638324970451894272]...done.
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
MODEL_ID=5409744543216041984
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
Waiting for operation [2143953872077455360]...done.
Deployed a model to the endpoint 3179402798458470400. Id of the deployed model: 2761913835345412096.
Task 7. Invoke the deployed model
%%bash
PROJECT=$(gcloud config get-value project)
ENDPOINT_NAME=flights_xai
ENDPOINT_ID=$(gcloud beta ai endpoints list --region=$REGION \
--format='value(ENDPOINT_ID)' --filter=display_name=${ENDPOINT_NAME})
curl -X POST \
-H "Authorization: Bearer "$(gcloud auth application-default print-access-token) \
-H "Content-Type: application/json; charset=utf-8" \
-d @example_input.json \
"https://${REGION}-aiplatform.googleapis.com/v1/projects/${PROJECT}/locations/${REGION}/endpoints/${ENDPOINT_ID}:explain"
Using endpoint [https://us-central1-aiplatform.googleapis.com/]
{
"explanations": [
{
"attributions": [
{
"baselineOutputValue": 0.532136082649231,
"instanceOutputValue": 0.59375303983688354,
"featureAttributions": {
"dep_delay": 0.01288589835166931,
"arr_airport_lat": -0.022652477025985721,
"origin": -0.0035686820745468141,
"dest": 0.0069152891635894777,
"carrier": -0.0072470575571060179,
"dep_airport_lat": 0.088973000645637512,
"distance": 0.0056412637233734129,
"taxi_out": 0.01297255456447601,
"arr_airport_lon": 0.18158756792545319,
"is_weekday": 0.00039418637752532961,
"dep_airport_lon": -0.21801592111587531,
"dep_hour": 0.0037313342094421392
},
"outputIndex": [
0
],
"approximationError": 0.00831190636988141,
"outputName": "pred"
}
]
},
{
"attributions": [
{
"baselineOutputValue": 0.532136082649231,
"instanceOutputValue": 0.90993964672088623,
"featureAttributions": {
"distance": 0.32391445338726038,
"arr_airport_lat": -0.015640795230865479,
"dep_delay": 0.00046990811824798579,
"dest": 0.0019374907016754151,
"carrier": -8.7702274322509771e-05,
"taxi_out": 0.0044553101062774656,
"dep_hour": 0.059252753853797913,
"dep_airport_lon": -0.17889939248561859,
"dep_airport_lat": 0.014450299739837649,
"is_weekday": 0,
"arr_airport_lon": 0.17022762298583979,
"origin": -0.002276384830474854
},
"outputIndex": [
0
],
"approximationError": 0.0094906026013256775,
"outputName": "pred"
}
]
}
],
"deployedModelId": "2761913835345412096",
"predictions": [
[
0.59375304
],
[
0.909939647
]
]
}
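The featureAttributions block is the useful part of this response: each value is that feature's contribution to moving the prediction away from the baselineOutputValue. A small sketch to rank attributions by magnitude, assuming the JSON above was saved to a hypothetical explain_response.json:

# Sketch: rank feature attributions by absolute magnitude per instance.
import json

with open('explain_response.json') as f:  # hypothetical capture of the :explain output
    resp = json.load(f)

for expl in resp['explanations']:
    attrs = expl['attributions'][0]['featureAttributions']
    for name, value in sorted(attrs.items(), key=lambda kv: abs(kv[1]), reverse=True):
        print('{:18s} {:+.4f}'.format(name, value))
    print('---')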