0% found this document useful (0 votes)
49 views

Important Lab

Gsp lab

Uploaded by

Shashank Dwivedi
Copyright
© © All Rights Reserved
Available Formats
Download as TXT, PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
49 views

Important Lab

Gsp lab

Uploaded by

Shashank Dwivedi
Copyright
© © All Rights Reserved
Available Formats
Download as TXT, PDF, TXT or read online on Scribd
You are on page 1/ 9

# GSP101 — Deploy and Troubleshoot a Website: fill in the lab values first.
export INSTANCE=
export ZONE=

# Fetch the solution script (URL was line-wrapped in the original; rejoined here).
curl -LO "https://raw.githubusercontent.com/QUICK-GCP-LAB/2-Minutes-Labs-Solutions/main/Deploy%20and%20Troubleshoot%20a%20Website%20Challenge%20Lab/gsp101.sh"

sudo chmod +x gsp101.sh
./gsp101.sh

------------------------------
GSP 306

# GSP306 — Migrate a MySQL Database to Google Cloud SQL.
# Fill in the lab values first.
export ZONE=
export REGION=

# Create the Cloud SQL instance for WordPress.
gcloud sql instances create wordpress \
  --tier=db-n1-standard-1 \
  --activation-policy=ALWAYS \
  --zone "$ZONE"

# Set the root password (quoted so '*' is not glob-expanded by the shell).
gcloud sql users set-password --host % root --instance wordpress --password 'Password1*'

# Authorize the blog VM's external IP (as a /32) on the Cloud SQL instance.
ADDRESS=$(gcloud compute instances describe blog --zone="$ZONE" \
  --format="get(networkInterfaces[0].accessConfigs[0].natIP)")/32

gcloud sql instances patch wordpress --authorized-networks "$ADDRESS" --quiet

# SSH into the blog VM; run the remaining commands inside that session.
gcloud compute ssh "blog" --zone="$ZONE" --project="$DEVSHELL_PROJECT_ID" --quiet

sudo apt-get update

# Cloud SQL instance IP, used by the mysql client below.
MYSQLIP=$(gcloud sql instances describe wordpress \
  --format="value(ipAddresses.ipAddress)")

export MYSQL_PWD='Password1*'

# Create the wordpress database and application user on Cloud SQL.
mysql --host="$MYSQLIP" --user=root << EOF
CREATE DATABASE wordpress;
CREATE USER 'blogadmin'@'%' IDENTIFIED BY 'Password1*';
GRANT ALL PRIVILEGES ON wordpress.* TO 'blogadmin'@'%';
FLUSH PRIVILEGES;
EOF

# Dump the local database and load it into Cloud SQL.
sudo mysqldump -u root -p'Password1*' wordpress > wordpress_backup.sql

mysql --host="$MYSQLIP" --user=root -p'Password1*' --verbose wordpress < wordpress_backup.sql

sudo service apache2 restart

# Point WordPress at the Cloud SQL instance by rewriting DB_HOST.
cd /var/www/html/wordpress

EXTERNAL_IP=$(gcloud sql instances describe wordpress \
  --format="value(ipAddresses[0].ipAddress)")

CONFIG_FILE="wp-config.php"

sudo sed -i "s/define('DB_HOST', 'localhost')/define('DB_HOST', '$EXTERNAL_IP')/" "$CONFIG_FILE"
-----------------------------

# GSP305 — Scale Out and Update a Containerized Application on a Kubernetes Cluster.
export ZONE=

# Fetch the solution script (URL rejoined from the original's line-wrapping).
curl -LO "https://raw.githubusercontent.com/Techcps/GSP-Short-Trick/master/Scale%20Out%20and%20Update%20a%20Containerized%20Application%20on%20a%20Kubernetes%20Cluster%3A%20Challenge%20Lab/techcpsgsp305.sh"
sudo chmod +x techcpsgsp305.sh
./techcpsgsp305.sh

-----------------------------------

# GSP304 — Build and Deploy a Docker Image to a Kubernetes Cluster.
export ZONE=

# Fetch the solution script (URL rejoined from the original's line-wrapping).
curl -LO "https://raw.githubusercontent.com/Techcps/GSP-Short-Trick/master/Build%20and%20Deploy%20a%20Docker%20Image%20to%20a%20Kubernetes%20Cluster:%20Challenge%20Lab/techcps304.sh"
sudo chmod +x techcps304.sh
./techcps304.sh

-----------------------------------
# GSP303 — Configure Secure RDP using a Windows Bastion Host.
export ZONE=

# Fetch the solution script (URL rejoined from the original's line-wrapping).
curl -LO "https://raw.githubusercontent.com/QUICK-GCP-LAB/2-Minutes-Labs-Solutions/main/Configure%20Secure%20RDP%20using%20a%20Windows%20Bastion%20Host%20Challenge%20Lab/gsp303.sh"

sudo chmod +x gsp303.sh
./gsp303.sh

----------------------------------------
# GSP301 — Deploy a Compute Instance with a Remote Startup Script.
export ZONE=

# Fetch the solution script (URL rejoined from the original's line-wrapping).
curl -LO "https://raw.githubusercontent.com/QUICK-GCP-LAB/2-Minutes-Labs-Solutions/main/Deploy%20a%20Compute%20Instance%20with%20a%20Remote%20Startup%20Script%20Challenge%20Lab/gsp301.sh"

sudo chmod +x gsp301.sh
./gsp301.sh

----------------------------------------
GSP343
-----------------------------------------------------------------------------------
-------------------------------------------------------------------------------

TASK 1:

# GSP343 Task 1 — create the cluster and namespaces, deploy OnlineBoutique to dev.
ZONE=us-central1-b

# NOTE(review): replace Cluster_Name with the name given in the lab
# (GKE cluster names must be lowercase).
gcloud container clusters create Cluster_Name \
  --project="$DEVSHELL_PROJECT_ID" \
  --zone="$ZONE" \
  --machine-type=e2-standard-2 \
  --num-nodes=2

kubectl create namespace dev
kubectl create namespace prod

# Clone the demo app and apply its manifests into the dev namespace.
git clone https://github.com/GoogleCloudPlatform/microservices-demo.git \
  && cd microservices-demo \
  && kubectl apply -f ./release/kubernetes-manifests.yaml --namespace dev

-----------------------------------------------------------------------------------
-------------------------------------------------------------------------------

TASK 2:

# GSP343 Task 2 — create the new node pool and migrate workloads off default-pool.
# NOTE(review): replace Pool_Name / Cluster_Name with the lab-provided names.
gcloud container node-pools create Pool_Name \
  --cluster=Cluster_Name \
  --machine-type=custom-2-3584 \
  --num-nodes=2 \
  --zone="$ZONE"

# Cordon every default-pool node so no new pods schedule there.
for node in $(kubectl get nodes -l cloud.google.com/gke-nodepool=default-pool -o=name); do
  kubectl cordon "$node"
done

# Drain the cordoned nodes. --delete-emptydir-data is the current name of the
# deprecated --delete-local-data flag used in the original.
for node in $(kubectl get nodes -l cloud.google.com/gke-nodepool=default-pool -o=name); do
  kubectl drain --force --ignore-daemonsets --delete-emptydir-data --grace-period=10 "$node"
done

# Confirm the pods moved to the new pool.
kubectl get pods -o=wide --namespace=dev

gcloud container node-pools delete default-pool --cluster Cluster_Name --zone "$ZONE"

-----------------------------------------------------------------------------------
-------------------------------------------------------------------------------

TASK 3:

# GSP343 Task 3 — create a PodDisruptionBudget so at least one frontend pod stays up.
kubectl create poddisruptionbudget onlineboutique-frontend-pdb \
  --selector app=frontend \
  --min-available 1 \
  --namespace dev

# Open the frontend deployment in nano to edit the image manually (see note below).
KUBE_EDITOR="nano" kubectl edit deployment/frontend --namespace dev

In the editor, locate the container `image:` field under `spec`.

Replace it with: image: gcr.io/qwiklabs-resources/onlineboutique-frontend:v2.1

Also set: imagePullPolicy: Always

Press Ctrl+X, then Y, then Enter to save and exit.

-----------------------------------------------------------------------------------
-------------------------------------------------------------------------------

TASK 4:
# GSP343 Task 4 — horizontal pod autoscaling for the frontend, then cluster autoscaling.
kubectl autoscale deployment frontend --cpu-percent=50 --min=1 --max=17 --namespace dev

kubectl get hpa --namespace dev

ZONE=us-central1-b

# Enable cluster autoscaling between 1 and 6 nodes.
gcloud beta container clusters update Cluster_Name \
  --enable-autoscaling \
  --min-nodes 1 \
  --max-nodes 6 \
  --zone="$ZONE"

-----------------------------------------------------------------------------------
-------------------------------------------------------------------------------

# Set Up a Google Cloud Network: Challenge Lab ||


[GSP314](https://www.cloudskillsboost.google/focuses/10417?parent=catalog) ||

## Solution [here](https://youtu.be/kG0HpV05nlk)

### Task 1: Migrate a stand-alone PostgreSQL database to a Cloud SQL for PostgreSQL
instance

1. Enable the [Database Migration


API](https://console.cloud.google.com/marketplace/product/google/datamigration.goog
leapis.com) and the [Service Networking
API](https://console.cloud.google.com/marketplace/product/google/servicenetworking.
googleapis.com)

2. Go to [VM instances](https://console.cloud.google.com/compute/instances)

3. Click `SSH` next to `antern-postgresql-vm`

### Run the following Commands in `SSH`

```
# Install the pglogical extension package for PostgreSQL 13.
sudo apt install postgresql-13-pglogical
```
```
# Append the lab-provided pg_hba/postgresql settings and restart PostgreSQL.
# (The gsutil/cat commands were line-wrapped in the original; rejoined here.)
sudo su - postgres -c "gsutil cp gs://cloud-training/gsp918/pg_hba_append.conf ."
sudo su - postgres -c "gsutil cp gs://cloud-training/gsp918/postgresql_append.conf ."
sudo su - postgres -c "cat pg_hba_append.conf >> /etc/postgresql/13/main/pg_hba.conf"
sudo su - postgres -c "cat postgresql_append.conf >> /etc/postgresql/13/main/postgresql.conf"
sudo systemctl restart postgresql@13-main
```
```
# Switch to the postgres OS user and open an interactive psql session.
sudo su - postgres
psql
```
```
-- Enable pglogical in each database involved in the migration.
\c postgres;
CREATE EXTENSION pglogical;
\c orders;
CREATE EXTENSION pglogical;
\c gmemegen_db;
CREATE EXTENSION pglogical;
```

* NOTE: Replace `[MIGRATION ADMINE]` with your `Postgres Migration Username`

```
-- Create the migration user and give it ownership/replication rights.
-- [MIGRATION ADMINE] is a placeholder — see the note above this block.
CREATE USER [MIGRATION ADMINE] PASSWORD 'DMS_1s_cool!';
ALTER DATABASE orders OWNER TO [MIGRATION ADMINE];
ALTER ROLE [MIGRATION ADMINE] WITH REPLICATION;

-- Grant pglogical access in the postgres database.
\c postgres;
GRANT USAGE ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT ALL ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.tables TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.depend TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_sync_status TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node_interface TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.queue TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_seq TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_table TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.sequence_state TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.subscription TO [MIGRATION ADMINE];

-- Same pglogical grants in the orders database, plus read access to its tables.
\c orders;
GRANT USAGE ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT ALL ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.tables TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.depend TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_sync_status TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node_interface TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.queue TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_seq TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_table TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.sequence_state TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.subscription TO [MIGRATION ADMINE];

GRANT USAGE ON SCHEMA public TO [MIGRATION ADMINE];
GRANT ALL ON SCHEMA public TO [MIGRATION ADMINE];
GRANT SELECT ON public.distribution_centers TO [MIGRATION ADMINE];
GRANT SELECT ON public.inventory_items TO [MIGRATION ADMINE];
GRANT SELECT ON public.order_items TO [MIGRATION ADMINE];
GRANT SELECT ON public.products TO [MIGRATION ADMINE];
GRANT SELECT ON public.users TO [MIGRATION ADMINE];

-- Same grants in gmemegen_db.
\c gmemegen_db;
GRANT USAGE ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT ALL ON SCHEMA pglogical TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.tables TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.depend TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.local_sync_status TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.node_interface TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.queue TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_seq TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.replication_set_table TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.sequence_state TO [MIGRATION ADMINE];
GRANT SELECT ON pglogical.subscription TO [MIGRATION ADMINE];

GRANT USAGE ON SCHEMA public TO [MIGRATION ADMINE];
GRANT ALL ON SCHEMA public TO [MIGRATION ADMINE];
GRANT SELECT ON public.meme TO [MIGRATION ADMINE];

-- Transfer ownership of the orders tables to the migration user.
\c orders;
\dt
ALTER TABLE public.distribution_centers OWNER TO [MIGRATION ADMINE];
ALTER TABLE public.inventory_items OWNER TO [MIGRATION ADMINE];
ALTER TABLE public.order_items OWNER TO [MIGRATION ADMINE];
ALTER TABLE public.products OWNER TO [MIGRATION ADMINE];
ALTER TABLE public.users OWNER TO [MIGRATION ADMINE];
\dt

-- NOTE(review): adds a primary key to inventory_items — presumably required
-- by the migration job; confirm against the lab instructions.
ALTER TABLE public.inventory_items ADD PRIMARY KEY(id);

\q
exit
```

* Go to [CREATE MIGRATION
JOB](https://console.cloud.google.com/dbmigration/migrations/create)

* Now Follow [Video's](https://youtu.be/kG0HpV05nlk) Instructions

### Task 2: Update permissions and add IAM roles to users

* Go to [IAM](https://console.cloud.google.com/iam-admin/iam) and follow [Video's]


(https://youtu.be/kG0HpV05nlk) instructions

### Task 3: Create networks and firewalls

* Note: For this task, you will need to log in to the `Cymbal Project` with the
`Cymbal Owner credentials`.

### Run the following Commands in CloudShell

### Assign Variables


```
# Fill these in from the lab's Task 3 instructions before running the script below.
export VPC_NAME=
export SUBNET_A=
export REGION_A=
export SUBNET_B=
export REGION_B=
export FIREWALL_RULE_NAME_1=
export FIREWALL_RULE_NAME_2=
export FIREWALL_RULE_NAME_3=
```
```
# Fetch and run the solution script (URL rejoined from the original's line-wrapping).
curl -LO "https://raw.githubusercontent.com/QUICK-GCP-LAB/2-Minutes-Labs-Solutions/main/Set%20Up%20a%20Google%20Cloud%20Network%20Challenge%20Lab/gsp314.sh"

sudo chmod +x gsp314.sh

./gsp314.sh
```

### Task 4: Troubleshoot and fix a broken GKE cluster

* Go to [Create sink](https://console.cloud.google.com/logs/router/sink)

* Paste the following in `Build inclusion filter`

```
resource.type=REPLACE HERE;
severity=ERROR
```

* Go to [IAM](https://console.cloud.google.com/iam-admin/iam) and follow [Video's]


(https://youtu.be/kG0HpV05nlk) instructions

### Congratulations 🎉 for completing the Challenge Lab !

##### *You Have Successfully Demonstrated Your Skills And Determination.*

#### *Well done!*

#### Don't Forget to Join the [Telegram Channel](https://t.me/QuickGcpLab) &


[Discussion group](https://t.me/QuickGcpLabChats)

# [QUICK GCP LAB](https://www.youtube.com/@quickgcplab)

---------------------
# Perform Foundational Data, ML, and AI Tasks in Google Cloud: Challenge Lab ||
[GSP323](https://www.cloudskillsboost.google/focuses/11044?parent=catalog) ||

## Solution [here](https://youtu.be/q_PxFoQwMM8)

## Task 1 : Run a simple Dataflow job

```
# Create a BigQuery dataset (replace the placeholder with the lab's dataset name).
bq mk REPLACE_HERE
```
```
# Create a Cloud Storage bucket (replace the placeholder with the lab's bucket name).
gsutil mb gs://REPLACE_HERE
```
```
# Copy the lab's CSV and schema locally and inspect the schema.
gsutil cp gs://cloud-training/gsp323/lab.csv .
gsutil cp gs://cloud-training/gsp323/lab.schema .
cat lab.schema
```

## Task 2 : Run a simple Dataproc job

### `For Task 2 Follow Video`

```
# API key created in the console, used by the Speech API request below.
export API_KEY=
```
```
# Bucket that receives the Speech API result.
export TASK_3_BUCKET=
```
```
# Bucket that receives the Natural Language result.
export TASK_4_BUCKET=
```

## Task 3: Use the Google Cloud Speech API

```
# Create a service account and key for the Natural Language API call.
gcloud iam service-accounts create Awesome \
  --display-name "my natural language service account"

gcloud iam service-accounts keys create ~/key.json \
  --iam-account "Awesome@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com"

export GOOGLE_APPLICATION_CREDENTIALS="/home/$USER/key.json"

gcloud auth activate-service-account \
  "Awesome@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com" \
  --key-file="$GOOGLE_APPLICATION_CREDENTIALS"

# Analyze entities in the sample sentence and save the result.
gcloud ml language analyze-entities \
  --content="Old Norse texts portray Odin as one-eyed and long-bearded, frequently wielding a spear named Gungnir and wearing a cloak and a broad hat." > result.json
```
```
# Switch back to your student credentials.
gcloud auth login --no-launch-browser
```

## Task 4: Use the Cloud Natural Language API

```
# Upload the Natural Language result to the task 4 bucket.
gsutil cp result.json $TASK_4_BUCKET
```
```
# Build the Speech API request for the sample FLAC audio.
cat > request.json <<EOF
{
"config": {
"encoding":"FLAC",
"languageCode": "en-US"
},
"audio": {
"uri":"gs://cloud-training/gsp323/task3.flac"
}
}
EOF
```
```
# Call the Speech API, then upload the result to the task 3 bucket.
curl -s -X POST -H "Content-Type: application/json" --data-binary @request.json \
  "https://speech.googleapis.com/v1/speech:recognize?key=${API_KEY}" > result.json

gsutil cp result.json $TASK_3_BUCKET
```
```
# Create a second service account for the Video Intelligence call and
# grab an access token for it.
gcloud iam service-accounts create quick-gcp-lab

gcloud iam service-accounts keys create key.json \
  --iam-account "quick-gcp-lab@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com"

gcloud auth activate-service-account --key-file key.json

export ACCESS_TOKEN=$(gcloud auth print-access-token)
```
```
# Build the Video Intelligence request for text detection on the sample video.
cat > request.json <<EOF
{
"inputUri":"gs://spls/gsp154/video/train.mp4",
"features": [
"TEXT_DETECTION"
]
}
EOF
```
```
# Start the annotation job; the response contains an operation name.
curl -s -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  'https://videointelligence.googleapis.com/v1/videos:annotate' \
  -d @request.json
```
```
# Poll the operation (substitute the name returned by the previous request).
# The original was missing the line continuation after the Authorization header.
curl -s -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  'https://videointelligence.googleapis.com/v1/operations/OPERATION_FROM_PREVIOUS_REQUEST' > result1.json
```

### Congratulations 🎉 for completing the Challenge Lab !

##### *You Have Successfully Demonstrated Your Skills And Determination.*

#### *Well done!*

#### Don't Forget to Join the [Telegram Channel](https://t.me/QuickGcpLab) &


[Discussion group](https://t.me/QuickGcpLabChats)

# [QUICK GCP LAB](https://www.youtube.com/@quickgcplab)

You might also like