Commit

chore: add bigquery example
metaclips committed Aug 27, 2024
1 parent 79bb5bd commit 53a2bbb
Showing 6 changed files with 495 additions and 0 deletions.
104 changes: 104 additions & 0 deletions examples/command/portals/databases/bigquery/datastream_corp/app.js
@@ -0,0 +1,104 @@
const { BigQuery } = require('@google-cloud/bigquery');
const process = require('process');

const projectId = process.env.GOOGLE_CLOUD_PROJECT;
if (!projectId) {
  console.error('GOOGLE_CLOUD_PROJECT environment variable must be set.');
  process.exit(1);
}

const credentials_base64 = process.env.GOOGLE_APPLICATION_CREDENTIALS_BASE64;
if (!credentials_base64) {
  console.error('GOOGLE_APPLICATION_CREDENTIALS_BASE64 environment variable must be set.');
  process.exit(1);
}

const credentials_json = Buffer.from(credentials_base64, 'base64').toString('utf-8');
const credentials = JSON.parse(credentials_json);

// Point the client at the local Ockam TCP inlet instead of the default
// BigQuery endpoint. The inlet forwards all traffic over the encrypted portal.
const bigqueryOptions = {
  projectId: projectId,
  apiEndpoint: 'http://127.0.0.1:8080',
  maxRetries: 100,
  credentials: credentials
};

// Create a BigQuery client
const bigquery = new BigQuery(bigqueryOptions);

async function createDataset(datasetId) {
  const [dataset] = await bigquery.createDataset(datasetId);
  console.log(`Dataset ${dataset.id} created.`);
}

async function createTable(datasetId, tableId) {
  const schema = [
    { name: 'name', type: 'STRING' },
    { name: 'age', type: 'INTEGER' },
    { name: 'email', type: 'STRING' }
  ];

  const options = {
    schema: schema
  };

  const [table] = await bigquery
    .dataset(datasetId)
    .createTable(tableId, options);

  console.log(`Table ${table.id} created.`);
}

async function insertData(datasetId, tableId) {
  const rows = [
    { name: 'John Doe', age: 30, email: '[email protected]' },
    { name: 'Jane Smith', age: 25, email: '[email protected]' }
  ];

  await bigquery
    .dataset(datasetId)
    .table(tableId)
    .insert(rows);

  console.log(`Inserted ${rows.length} rows into ${tableId}`);
}

async function queryData(datasetId, tableId) {
  const query = `
    SELECT name, age, email
    FROM \`${bigquery.projectId}.${datasetId}.${tableId}\`
    WHERE age > 20
  `;

  const [rows] = await bigquery.query({ query });

  console.log('Query Results:');
  rows.forEach(row => {
    console.log(`Name: ${row.name}, Age: ${row.age}, Email: ${row.email}`);
  });
}

async function deleteDataset(datasetId) {
  // force: true also deletes all tables within the dataset
  await bigquery.dataset(datasetId).delete({ force: true });
  console.log(`Dataset ${datasetId} deleted.`);
}

// Run all steps
(async () => {
  const datasetId = "ockam_" + (Math.random() + 1).toString(36).substring(7);
  const tableId = 'ockam_table';

  try {
    await createDataset(datasetId);
    await createTable(datasetId, tableId);
    await insertData(datasetId, tableId);
    await queryData(datasetId, tableId);

    console.log(
      "\nThe example run was successful 🥳.\n" +
      "\nThe app connected with BigQuery over an encrypted portal." +
      "\nInserted some data, and queried it back.\n",
    );
  } catch (error) {
    console.error('Error:', error);
  }

  await deleteDataset(datasetId);
})();
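
Note: to try the app outside the EC2 user-data flow below, only the two environment variables checked above are needed, plus an inlet already listening on 127.0.0.1:8080. A hypothetical local invocation (the file name service-account.json is an assumption):

# Hypothetical local run; assumes an Ockam inlet is already up on 127.0.0.1:8080.
export GOOGLE_CLOUD_PROJECT="my-gcp-project"   # hypothetical project id
export GOOGLE_APPLICATION_CREDENTIALS_BASE64=$(base64 < service-account.json | tr -d '\n')
npm install @google-cloud/bigquery
node app.js
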
127 changes: 127 additions & 0 deletions examples/command/portals/databases/bigquery/datastream_corp/run.sh
@@ -0,0 +1,127 @@
#!/usr/bin/env bash
set -ex

run() {
    enrollment_ticket="$1"
    # Encode the credentials as a single line; GNU base64 wraps long output by default,
    # which would break the export line sent over ssh below.
    GOOGLE_APPLICATION_CREDENTIALS_BASE64=$(echo "$GOOGLE_APPLICATION_CREDENTIALS" | base64 | tr -d '\n')

    # ----------------------------------------------------------------------------------------------------------------
    # CREATE NETWORK

    # Create a new VPC and tag it.
    vpc_id=$(aws ec2 create-vpc --cidr-block 10.0.0.0/16 --query 'Vpc.VpcId')
    aws ec2 create-tags --resources "$vpc_id" --tags "Key=Name,Value=${name}-vpc"

    # Create an Internet Gateway and attach it to the VPC.
    gw_id=$(aws ec2 create-internet-gateway --query 'InternetGateway.InternetGatewayId')
    aws ec2 attach-internet-gateway --vpc-id "$vpc_id" --internet-gateway-id "$gw_id"

    # Create a route table and a route to the Internet through the Gateway.
    rtb_id=$(aws ec2 create-route-table --vpc-id "$vpc_id" --query 'RouteTable.RouteTableId')
    aws ec2 create-route --route-table-id "$rtb_id" --destination-cidr-block 0.0.0.0/0 --gateway-id "$gw_id"

    # Create a subnet and associate the route table.
    az=$(aws ec2 describe-availability-zones --query "AvailabilityZones[0].ZoneName")
    subnet_id=$(aws ec2 create-subnet --vpc-id "$vpc_id" --cidr-block 10.0.0.0/24 \
        --availability-zone "$az" --query 'Subnet.SubnetId')
    aws ec2 modify-subnet-attribute --subnet-id "$subnet_id" --map-public-ip-on-launch
    aws ec2 associate-route-table --subnet-id "$subnet_id" --route-table-id "$rtb_id"

    # Create a security group to allow TCP egress to the Internet.
    sg_id=$(aws ec2 create-security-group --group-name "${name}-sg" --vpc-id "$vpc_id" --query 'GroupId' \
        --description "Allow TCP egress")
    aws ec2 authorize-security-group-egress --group-id "$sg_id" --cidr 0.0.0.0/0 --protocol tcp --port 0-65535

    # Allow SSH ingress from anywhere, so the machine running this script can provision the instance.
    aws ec2 authorize-security-group-ingress --group-id "$sg_id" --cidr "0.0.0.0/0" --protocol tcp --port 22

    # ----------------------------------------------------------------------------------------------------------------
    # CREATE INSTANCE

    ami_id=$(aws ec2 describe-images --owners 137112412989 --query "Images | sort_by(@, &CreationDate) | [-1].ImageId" \
        --filters "Name=name,Values=al2023-ami-2023*" "Name=architecture,Values=x86_64" \
                  "Name=virtualization-type,Values=hvm" "Name=root-device-type,Values=ebs")

    aws ec2 create-key-pair --key-name "${name}-key" --query 'KeyMaterial' > key.pem
    chmod 400 key.pem

    sed "s/\$ENROLLMENT_TICKET/${enrollment_ticket}/g" run_ockam.sh > user_data1.sh
    sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data.sh
    instance_id=$(aws ec2 run-instances --image-id "$ami_id" --instance-type c5n.large \
        --subnet-id "$subnet_id" --security-group-ids "$sg_id" \
        --key-name "${name}-key" --user-data file://user_data.sh --query 'Instances[0].InstanceId')
    aws ec2 create-tags --resources "$instance_id" --tags "Key=Name,Value=${name}-ec2-instance"
    aws ec2 wait instance-running --instance-ids "$instance_id"
    ip=$(aws ec2 describe-instances --instance-ids "$instance_id" --query 'Reservations[0].Instances[0].PublicIpAddress')
    rm -f user_data*.sh

    until scp -o StrictHostKeyChecking=no -i ./key.pem ./app.js "ec2-user@$ip:app.js"; do sleep 10; done
    ssh -o StrictHostKeyChecking=no -i ./key.pem "ec2-user@$ip" \
        'bash -s' << EOS
export GOOGLE_CLOUD_PROJECT="$GOOGLE_CLOUD_PROJECT_ID"
export GOOGLE_APPLICATION_CREDENTIALS_BASE64="$GOOGLE_APPLICATION_CREDENTIALS_BASE64"
sudo yum update -y && sudo yum install nodejs -y
npm install @google-cloud/bigquery
node app.js
EOS
}

cleanup() {
    # ----------------------------------------------------------------------------------------------------------------
    # DELETE INSTANCE

    instance_ids=$(aws ec2 describe-instances --filters "Name=tag:Name,Values=${name}-ec2-instance" \
        --query "Reservations[*].Instances[*].InstanceId")
    for i in $instance_ids; do
        aws ec2 terminate-instances --instance-ids "$i"
        aws ec2 wait instance-terminated --instance-ids "$i"
    done

    if aws ec2 describe-key-pairs --key-names "${name}-key" &>/dev/null; then
        aws ec2 delete-key-pair --key-name "${name}-key"
    fi

    rm -f key.pem

    # ----------------------------------------------------------------------------------------------------------------
    # DELETE NETWORK

    vpc_ids=$(aws ec2 describe-vpcs --query 'Vpcs[*].VpcId' --filters "Name=tag:Name,Values=${name}-vpc")

    for vpc_id in $vpc_ids; do
        internet_gateways=$(aws ec2 describe-internet-gateways --query "InternetGateways[*].InternetGatewayId" \
            --filters Name=attachment.vpc-id,Values="$vpc_id")
        for i in $internet_gateways; do
            aws ec2 detach-internet-gateway --internet-gateway-id "$i" --vpc-id "$vpc_id"
            aws ec2 delete-internet-gateway --internet-gateway-id "$i"
        done

        subnet_ids=$(aws ec2 describe-subnets --query "Subnets[*].SubnetId" --filters Name=vpc-id,Values="$vpc_id")
        for i in $subnet_ids; do aws ec2 delete-subnet --subnet-id "$i"; done

        route_tables=$(aws ec2 describe-route-tables --filters Name=vpc-id,Values="$vpc_id" \
            --query 'RouteTables[?length(Associations[?Main!=`true`]) > `0` || length(Associations) == `0`].RouteTableId')
        for i in $route_tables; do aws ec2 delete-route-table --route-table-id "$i" || true; done

        security_groups=$(aws ec2 describe-security-groups --filters Name=vpc-id,Values="$vpc_id" \
            --query "SecurityGroups[?!contains(GroupName, 'default')].[GroupId]")
        for i in $security_groups; do aws ec2 delete-security-group --group-id "$i"; done

        if aws ec2 describe-vpcs --vpc-ids "$vpc_id" &>/dev/null; then
            aws ec2 delete-vpc --vpc-id "$vpc_id"
        fi
    done
}

export AWS_PAGER="";
export AWS_DEFAULT_OUTPUT="text";

user=""
command -v sha256sum &>/dev/null && user=$(aws sts get-caller-identity | sha256sum | cut -c 1-20)
command -v shasum &>/dev/null && user=$(aws sts get-caller-identity | shasum -a 256 | cut -c 1-20)
export name="ockam-ex-ts-ds-$user"

# Check if the first argument is "cleanup"
# If it is, call the cleanup function. If not, call the run function.
if [ "$1" = "cleanup" ]; then cleanup; else run "$1"; fi
46 changes: 46 additions & 0 deletions examples/command/portals/databases/bigquery/datastream_corp/run_ockam.sh
@@ -0,0 +1,46 @@
#!/bin/bash
set -ex

# Change into ec2-user's home directory and use sudo to run the commands as ec2-user
cd /home/ec2-user
sudo -u ec2-user bash << 'EOS'
set -ex
# Install Ockam Command
export OCKAM_VERSION="$OCKAM_VERSION"
curl --proto '=https' --tlsv1.2 -sSfL https://install.command.ockam.io | bash
source "$HOME/.ockam/env"
# Run `ockam project enroll ...`
#
# The `project enroll` command creates a new vault and generates a cryptographic identity with
# private keys stored in that vault.
#
# The enrollment ticket includes routes and identifiers for the project membership authority
# and the project's node that offers the relay service.
#
# The enrollment ticket also includes an enrollment token. The `project enroll` command
# creates a secure channel with the project membership authority and presents this enrollment token.
# The authority enrolls the presented identity and returns a project membership credential.
#
# The command stores this credential for later use and exits.
ockam project enroll "$ENROLLMENT_TICKET"
# Create an ockam node.
#
# Create an access control policy that only allows project members that possess a credential with
# attribute bigquery-outlet="true" to connect to TCP Portal Inlets on this node.
#
# Create a TCP Portal Inlet to Google's BigQuery API.
# This makes the remote API available on all local IPs at 0.0.0.0:8080.
cat << EOF > inlet.yaml
tcp-inlet:
  from: 0.0.0.0:8080
  via: bigquery
  allow: '(= subject.bigquery-outlet "true")'
EOF
ockam node create inlet.yaml
rm inlet.yaml
EOS
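
The inlet above reaches BigQuery through a relay named `bigquery`, which the outlet side of the portal must create. That side is not visible in this excerpt, so the following is only a sketch of what the matching outlet node configuration could look like; the attribute name bigquery-inlet and the target bigquery.googleapis.com:443 are assumptions:

# Hypothetical outlet-side configuration, mirroring the inlet's policy.
cat << EOF > outlet.yaml
relay: bigquery
tcp-outlet:
  to: bigquery.googleapis.com:443
  allow: '(= subject.bigquery-inlet "true")'
EOF
ockam node create outlet.yaml
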
38 changes: 38 additions & 0 deletions examples/command/portals/databases/bigquery/metrics_corp/run.sh
@@ -0,0 +1,38 @@
#!/usr/bin/env bash

run() {
    enrollment_ticket="$1"

    # ----------------------------------------------------------------------------------------------------------------
    # CREATE INSTANCE AND START RELAY
    sed "s/\$ENROLLMENT_TICKET/${enrollment_ticket}/g" run_ockam.sh > user_data1.sh
    sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data.sh

    gcloud compute instances create "${name}-key" \
        --project="$GOOGLE_CLOUD_PROJECT_ID" \
        --zone="us-central1-c" \
        --create-disk=auto-delete=yes,boot=yes,device-name="${name}-key",image=projects/debian-cloud/global/images/debian-12-bookworm-v20240815,mode=rw,size=10,type=pd-balanced \
        --machine-type=e2-medium \
        --network-interface=network-tier=PREMIUM,stack-type=IPV4_ONLY,subnet=default \
        --tags="${name}-key" \
        --metadata-from-file=startup-script=user_data.sh

    rm -f user_data*.sh
}

cleanup() {
    # ----------------------------------------------------------------------------------------------------------------
    # DELETE INSTANCE
    gcloud compute instances delete "${name}-key" --zone="us-central1-c" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
    rm -f user_data*.sh
}


user=""
command -v sha256sum &>/dev/null && user=$(gcloud auth list --format json | jq -r '.[0].account' | sha256sum | cut -c 1-20)
command -v shasum &>/dev/null && user=$(gcloud auth list --format json | jq -r '.[0].account' | shasum -a 256 | cut -c 1-20)
export name="ockam-ex-ts-m-$user"

# Check if the first argument is "cleanup"
# If it is, call the cleanup function. If not, call the run function.
if [ "$1" = "cleanup" ]; then cleanup; else run "$1"; fi