Google BigTable
Google BigTable is a fully managed, scalable NoSQL database service for large analytical and operational workloads with up to 99.999% availability
Official Website: https://cloud.google.com/bigtable
Tags: database, SaaS
- JavaScript
- Python
NodeJS package: https://www.npmjs.com/package/@google-cloud/bigtable
Version: 4.3.0
PyPI package: https://pypi.org/project/google-cloud-bigtable/
Version: 2.22.0
Credential configuration
The projectId
corresponds to the project ID from the Google Developer's Console.
The credentials
field of the JSON corresponds to a JSON key of the service account. This key
is downloaded as a JSON file when you create it. It should be something like this:
{
"type": "service_account",
"project_id": "PROJECT_ID",
"private_key_id": "KEY_ID",
"private_key": "-----BEGIN PRIVATE KEY-----\nPRIVATE_KEY\n-----END PRIVATE KEY-----\n",
"client_email": "SERVICE_ACCOUNT_EMAIL",
"client_id": "CLIENT_ID",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/SERVICE_ACCOUNT_EMAIL"
}
If you don't have one, create a service account and then, a JSON key for that service account.
info
Be sure to grant BigTable permissions to this service account.
Here is an example of a filled credential configuration form in YepCode:
Google BigTable Snippets available in editor
note
The title is the triggering text for YepCode to autocomplete the script.
- JavaScript
- Python
Integration
New integration from credential
// Build a BigTable client from a YepCode credential, identified by its slug.
const googleBigTableClient = yepcode.integration.googleBigTable("credential-slug");
New integration from plain authentication data
// Import the Bigtable client constructor from the official Google Cloud package.
const { Bigtable } = require("@google-cloud/bigtable");

// Service-account key fields, exactly as found in the downloaded JSON key file.
const serviceAccountKey = {
  type: "service_account",
  project_id: "yepcode",
  private_key_id: "XXXXX",
  private_key: "-----BEGIN PRIVATE KEY-----\nx\n-----END PRIVATE KEY-----",
  client_email: "yepcode@example.org",
  client_id: "1234567890",
  auth_uri: "https://example.org",
  token_uri: "https://example.org",
  auth_provider_x509_cert_url: "https://example.org",
  client_x509_cert_url: "https://example.org",
};

// Client options: the GCP project id plus the service-account credentials.
const googleBigTableCredentials = {
  projectId: "YepCode",
  credentials: serviceAccountKey,
};

const googleBigTableClient = new Bigtable(googleBigTableCredentials);
Write a Row
Simple
// Write a single row into a BigTable table.
// Fixed: the snippet referenced an undefined `bigtable`; the client created
// above in this document is `googleBigTableClient`.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  const rowToInsert = {
    key: "id#4c410523#20190501",
    data: {
      stats_summary: {
        kind: "human",
        nature: "agressive",
        constitution: "strong",
      },
    },
  };
  await table.insert(rowToInsert);
  console.log(`Successfully wrote row ${rowToInsert.key}`);
} catch (error) {
  console.error(error);
}
Multiple
// Write two rows with a single bulk insert.
// Fixed: the snippet referenced an undefined `bigtable`; the client created
// above in this document is `googleBigTableClient`.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  const rowsToInsert = [
    {
      key: "id#7x410521#90190501",
      data: {
        stats_summary: {
          kind: "fremen",
          nature: "peaceful",
          constitution: "strong",
        },
      },
    },
    {
      key: "id#4c410523#20190501",
      data: {
        stats_summary: {
          kind: "human",
          nature: "agressive",
          constitution: "strong",
        },
      },
    },
  ];
  // table.insert accepts an array of row objects for bulk writes.
  await table.insert(rowsToInsert);
  console.log(
    `Successfully wrote 2 rows: ${rowsToInsert[0].key} and ${rowsToInsert[1].key}`
  );
} catch (error) {
  console.error(error);
}
Conditional
// Conditionally mutate a row: the insert in `onMatch` runs only when the
// filter matches the row's current contents.
// Fixed: the snippet referenced an undefined `bigtable`; the client created
// above in this document is `googleBigTableClient`.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  const row = table.row("row-id");
  const filter = [{ column: "stats_summary", value: { nature: "agressive" } }];
  const config = {
    onMatch: [
      { method: "insert", data: { stats_summary: { dangerous: true } } },
    ],
  };
  await row.filter(filter, config);
  console.log("Successfully updated row");
} catch (error) {
  console.error(error);
}
Read a Row
Simple
// Read a single row by key and print it.
// Fixed: the snippet referenced an undefined `bigtable`; the client created
// above in this document is `googleBigTableClient`.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  // row.get() resolves to an array whose first element is the row.
  const [row] = await table.row("row-id").get();
  console.log(row);
} catch (error) {
  console.error(error);
}
Multiple
// Read multiple rows by key and print each row's id and data.
// Fixed: the snippet referenced an undefined `bigtable`; the client created
// above in this document is `googleBigTableClient`.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  // getRows resolves to an array whose first element is the list of rows.
  const [rows] = await table.getRows({ keys: ["row-id"] });
  rows.forEach((row) => console.log(row.id, row.data));
} catch (error) {
  console.error(error);
}
Conditional
// Stream rows that match a filter and print each one.
// Fixed: the snippet referenced an undefined `bigtable` (the client created
// above is `googleBigTableClient`), and it awaited createReadStream, which
// returns a readable stream synchronously — not a promise.
try {
  const instance = googleBigTableClient.instance("instance-id");
  const table = instance.table("table-id");
  const filter = [{ column: "stats_summary", value: { nature: "agressive" } }];
  const readStream = table.createReadStream({ filter });
  readStream.on("error", (err) => {
    console.log(err);
  });
  readStream.on("data", (row) => {
    console.log(row.id, row.data);
  });
  readStream.on("end", () => {});
} catch (error) {
  console.error(error);
}
Integration
New integration from credential
# Build a BigTable client from a YepCode credential, identified by its slug.
big_table_client = yepcode.integration.google_big_table("credential-slug")
New integration from plain authentication data
from google.cloud.bigtable.client import Client
from google.oauth2.service_account import Credentials

# GCP project that owns the BigTable instance.
project_id = "yepcode"

# Service-account key fields, exactly as found in the downloaded JSON key file.
# Renamed to snake_case per PEP 8 (was: credentialsDict).
credentials_dict = {
    "type": "service_account",
    "project_id": "yepcode",
    "private_key_id": "XXXXX",
    "private_key": "-----BEGIN PRIVATE KEY-----\nx\n-----END PRIVATE KEY-----",
    "client_email": "yepcode@example.org",
    "client_id": "1234567890",
    "auth_uri": "https://example.org",
    "token_uri": "https://example.org",
    "auth_provider_x509_cert_url": "https://example.org",
    "client_x509_cert_url": "https://example.org",
}

credentials = Credentials.from_service_account_info(credentials_dict)
big_table_client = Client(project=project_id, credentials=credentials)
Create Instance
Create an instance
from google.cloud.bigtable import enums

# Identifiers and sizing for the new instance and its cluster.
instance_id = "instance-id"
cluster_id = "cluster-id"
zone = "location-id"
node_count = 1

# A PRODUCTION-type instance backed by a single SSD cluster.
instance = big_table_client.instance(
    instance_id,
    instance_type=enums.Instance.Type.PRODUCTION,
    labels={"prod-label": "prod-label"},
)
cluster = instance.cluster(
    cluster_id,
    location_id=zone,
    serve_nodes=node_count,
    default_storage_type=enums.StorageType.SSD,
)

# instance.create returns a long-running operation; block until it finishes.
operation = instance.create(clusters=[cluster])
operation.result(timeout=100)
Create Table
Create a table
# Create a new table inside an existing BigTable instance.
table = big_table_client.instance("instance-id").table("table_id")
table.create()
Create Column Family
Create a column family
# Create a column family inside an existing table.
# Fixed: the column family id had a stray trailing quote ("column_family_id""),
# which made the snippet a syntax error.
instance = big_table_client.instance("instance-id")
table = instance.table("table_id")
column_family = table.column_family("column_family_id")
column_family.create()
Write a Row
Simple
# Write one row by setting three cells under the stats_summary column family.
instance = big_table_client.instance("instance-id")
table = instance.table("table_id")
column_family_id = "stats_summary"
row_key = "id#4c410523#20190501"

cells = {
    "kind": "human",
    "nature": "agressive",
    "constitution": "strong",
}
row = table.direct_row(row_key)
for qualifier, value in cells.items():
    row.set_cell(column_family_id, qualifier, value)
row.commit()
print(f"Successfully wrote row {row_key}.")
Multiple
# Write two rows with a single bulk mutation.
instance = big_table_client.instance("instance-id")
table = instance.table("table_id")
column_family_id = "stats_summary"

rows = [
    table.direct_row("id#7x410521#90190501"),
    table.direct_row("id#4c410523#20190501"),
]
rows[0].set_cell(column_family_id, "kind", "fremen")
rows[0].set_cell(column_family_id, "nature", "peaceful")
rows[0].set_cell(column_family_id, "constitution", "strong")
rows[1].set_cell(column_family_id, "kind", "human")
rows[1].set_cell(column_family_id, "nature", "agressive")
rows[1].set_cell(column_family_id, "constitution", "strong")

# mutate_rows returns one status per row; a non-zero code means that row failed.
# Fixed: the failure path previously raised a bare Exception() with no message.
response = table.mutate_rows(rows)
for i, status in enumerate(response):
    if status.code != 0:
        print(f"Error writing row {i}: {status.message}")
        raise Exception(f"Error writing row {i}: {status.message}")
print("Successfully wrote 2 rows.")
Conditional
from google.cloud.bigtable import row_filters

instance = big_table_client.instance("instance-id")
table = instance.table("table_id")
column_family_id = "stats_summary"
row_key = "row-id"

# All three filters must match (family, qualifier, value) for the write to run.
match_filters = [
    row_filters.FamilyNameRegexFilter(column_family_id),
    row_filters.ColumnQualifierRegexFilter("conditional-column-qualifier"),
    row_filters.ValueRegexFilter("conditional-value"),
]
chain = row_filters.RowFilterChain(filters=match_filters)

row = table.conditional_row(row_key, filter_=chain)
row.set_cell(column_family_id, "column-qualifier", "value")
row.commit()
print("Successfully updated row.")
Read a Row
Simple
# Fetch a single row by key and print it.
table = big_table_client.instance("instance-id").table("table_id")
row = table.read_row("row-id")
print(row)
Multiple
# Fixed import: RowSet lives in google.cloud.bigtable, not google.cloud.bigquery.
from google.cloud.bigtable.row_set import RowSet

instance = big_table_client.instance("instance-id")
table = instance.table("table_id")

# Row keys must be bytes.
row_set = RowSet()
row_set.add_row_key(b"row-id-1")
row_set.add_row_key(b"row-id-2")

rows = table.read_rows(row_set=row_set)
for row in rows:
    print(row.cells)
Multiple based on prefix
# Fixed import: RowSet lives in google.cloud.bigtable, not google.cloud.bigquery.
from google.cloud.bigtable.row_set import RowSet

instance = big_table_client.instance("instance-id")
table = instance.table("table_id")

prefix = "prefix"
# The exclusive end key is the prefix with its last character incremented, so
# the range [prefix, end_key) covers exactly the keys starting with the prefix.
end_key = prefix[:-1] + chr(ord(prefix[-1]) + 1)

row_set = RowSet()
row_set.add_row_range_from_keys(prefix.encode("utf-8"), end_key.encode("utf-8"))

rows = table.read_rows(row_set=row_set)
for row in rows:
    print(row.cells)
Conditional
from google.cloud.bigtable import row_filters

instance = big_table_client.instance("instance-id")
table = instance.table("table_id")

# Renamed from `filter` to avoid shadowing the Python built-in filter().
row_filter = row_filters.RowFilterChain(
    filters=[
        row_filters.FamilyNameRegexFilter("column-family-id"),
        row_filters.ColumnQualifierRegexFilter("column-qualifier"),
        row_filters.ValueRegexFilter("value"),
    ]
)
rows = table.read_rows(filter_=row_filter)
for row in rows:
    print(row.cells)