@@ -54,12 +54,6 @@ export REPO_NAME="milk-pouch-classification-repo"
 # Name of the container image and Cloud Run service
 export IMAGE_NAME="milk-pouch-classification-service"
 
-# [Input] GCS Bucket name for uploading original images
-export SOURCE_BUCKET_NAME="milk-pouch-classification-uploads"
-
-# [Output] GCS Bucket name for storing annotated images
-export DESTINATION_BUCKET_NAME="milk-pouch-classification-annotated"
-
 # [Output] Name of the BigQuery Dataset
 export BQ_DATASET="milk_pouch_classification"
 
@@ -109,6 +103,13 @@ if [[ "${DEVICE}" != "cpu" && "${DEVICE}" != "gpu" ]]; then
   exit 1
 fi
 
+# [Input] GCS Bucket name for uploading original images
+export SOURCE_BUCKET_NAME="milk-pouch-classification-uploads-${PROJECT_ID}"
+
+# [Output] GCS Bucket name for storing annotated images
+export DESTINATION_BUCKET_NAME="milk-pouch-classification-annotated-${PROJECT_ID}"
+
+
 echo "🚀 Starting deployment for a '${DEVICE}' configuration..."
 echo ""
 
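Note: the relocated bucket definitions now append ${PROJECT_ID}, which keeps the GCS bucket names globally unique per project, and the variables are exported after the argument checks so the project ID is already resolved when the names are built. As a minimal sketch only (assuming the script creates these buckets in a later step; the REGION fallback is illustrative, not from the script), the project-suffixed buckets could be provisioned idempotently like this:

    # Sketch: create the project-suffixed buckets if they do not exist yet.
    # REGION is an assumed variable; the real script may create buckets elsewhere.
    for BUCKET in "${SOURCE_BUCKET_NAME}" "${DESTINATION_BUCKET_NAME}"; do
      gcloud storage buckets create "gs://${BUCKET}" \
        --project="${PROJECT_ID}" \
        --location="${REGION:-US}" \
        || echo "Bucket '${BUCKET}' already exists."
    done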
@@ -171,7 +172,7 @@ echo "✅ Step 3: Create BigQuery Dataset and Table..."
 bq --location=US mk --dataset "${PROJECT_ID}:${BQ_DATASET}" \
   || echo "Dataset '${BQ_DATASET}' already exists."
 bq mk --table "${PROJECT_ID}:${BQ_DATASET}.${BQ_TABLE}" \
-  ./milk_pouch_results_schema.json \
+  ./src/milk_pouch_results_schema.json \
   || echo "Table '${BQ_TABLE}' already exists."
 echo "BigQuery resources are ready."
 echo ""
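Note: this hunk only corrects the schema path so bq mk --table reads the JSON schema from ./src/, matching where the file lives in the repository. As a quick sanity check after deployment (a sketch, not part of the script), the created table's schema can be inspected with:

    # Sketch: confirm the table picked up the schema from ./src/milk_pouch_results_schema.json.
    bq show --schema --format=prettyjson "${PROJECT_ID}:${BQ_DATASET}.${BQ_TABLE}"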