Google BigTable to Amazon S3 NDJSON file
Connect Google BigTable to an Amazon S3 NDJSON file in our serverless environment.
Use this template to read rows from Google BigTable and use them to create NDJSON file entries in an Amazon S3 bucket.
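The resulting file uses NDJSON (newline-delimited JSON): one JSON object per line, each produced from a single BigTable row. For illustration only (the keys and columns below are made up), the uploaded S3 object would look like:

  {"row_key":"user#001","profile:name":"Jane","profile:age":"34"}
  {"row_key":"user#002","profile:name":"John","profile:age":"27"}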
Read rows from Google BigTable
Used integrations: Google BigTable

JavaScript:
class GoogleBigtableSourceGetRows {
  async init() {
    // TODO: Create your google-bigtable credential
    // More info at https://yepcode.io/docs/integrations/google-bigtable/#credential-configuration
    const googleBigTable = yepcode.integration.googleBigTable(
      "your-google-bigtable-credential-name"
    );
    // TODO: Customize your instance and table names
    this.table = await googleBigTable
      .instance("your-instance-name")
      .table("your-table-name");
  }

  async fetch(publish, done) {
    // You can customize the get options, e.g. to fetch only matching keys
    // Look at: https://cloud.google.com/nodejs/docs/reference/bigtable/latest/bigtable/getrowsoptions
    const getRowsOptions = {};
    const [rows] = await this.table.getRows(getRowsOptions);
    for (const rowResponse of rows) {
      const [row] = await rowResponse.get();
      const item = this._mapRowToItem(row);
      await publish(item);
    }
    done();
  }

  _mapRowToItem(row) {
    const columnsAndLastValue = Object.entries(row.data).map(
      ([columnName, columnContent]) => [
        columnName,
        this._getLastColumnValue(columnContent),
      ]
    );
    return Object.fromEntries(columnsAndLastValue);
  }

  _getLastColumnValue(columnContent) {
    // Cells come newest first, so the second [0] accesses the latest value.
    // Previous versions are available at the following indexes of the list.
    return columnContent[0][0].value;
  }

  async close() {}
}
Python:

from google.cloud.bigtable import row_filters

class GoogleBigtableSourceGetRows:
    def setup(self):
        # TODO: Create your BigTable credential:
        # More info at https://yepcode.io/docs/integrations/google-bigtable/#credential-configuration
        self.big_table_client = yepcode.integration.googleBigTable(
            "your-bigtable-credential-name"
        )

    def generator(self):
        # TODO: Customize instance id, table id and filters if needed
        instance = self.big_table_client.instance("instance-id")
        table = instance.table("table-id")
        # Named row_filter to avoid shadowing the built-in filter()
        row_filter = row_filters.RowFilterChain(
            filters=[
                row_filters.FamilyNameRegexFilter("column-family-id"),
                row_filters.ColumnQualifierRegexFilter("column-qualifier"),
                row_filters.ValueRegexFilter("value"),
            ]
        )
        rows = table.read_rows(filter_=row_filter)
        # TODO: Customize the item to yield
        for row in rows:
            item = {"row_key": row.row_key.decode("utf-8")}
            for cf, cols in row.cells.items():
                for col, cells in cols.items():
                    # Column qualifiers are bytes; cells come newest first,
                    # so cells[0] holds the latest version of the value
                    item[f"{cf}:{col.decode('utf-8')}"] = cells[0].value.decode("utf-8")
            yield item

    def close(self):
        pass
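In the JavaScript variant above, getRowsOptions is left empty. As a sketch of how it could be filled in to fetch only matching keys, here is an example using the standard @google-cloud/bigtable options (the row-key prefix and limit below are made-up values; adjust them to your own row-key design):

  // Fetch at most 1000 rows whose key starts with "user#",
  // keeping only the latest cell version per column
  const getRowsOptions = {
    prefix: "user#",
    limit: 1000,
    filter: [{ column: { cellLimit: 1 } }],
  };
  const [rows] = await this.table.getRows(getRowsOptions);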
Create NDJSON file entries in Amazon S3 Bucket
Used integrations: AWS S3

JavaScript:
// These modules come from the "ndjson" and "@aws-sdk/lib-storage" npm packages
// and Node's standard library (add the requires if your environment needs them)
const ndjson = require("ndjson");
const { PassThrough } = require("stream");
const { Upload } = require("@aws-sdk/lib-storage");

class AwsS3TargetUploadNdjson {
  async init() {
    // TODO: Create your aws-s3 credential
    // More info at https://yepcode.io/docs/integrations/aws-s3/#credential-configuration
    const awsS3 = yepcode.integration.awsS3("your-aws-s3-credential-name");
    // Transforms the items into NDJSON format
    this.stringifier = ndjson.stringify();
    const targetStream = new PassThrough();
    this.stringifier.pipe(targetStream);
    // TODO: Customize the Upload content
    // More info at: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/interfaces/_aws_sdk_lib_storage.options-1.html
    const upload = new Upload({
      client: awsS3,
      params: {
        Bucket: "your-bucket-name",
        Key: "your-file-name.ndjson",
        Body: targetStream,
      },
    });
    upload.on("httpUploadProgress", (progress) => {
      console.log(`Upload progress`, progress);
    });
    this.uploadPromise = upload.done();
  }

  async consume(item) {
    this.stringifier.write(item);
  }

  async close() {
    try {
      this.stringifier.end();
    } catch (error) {
      console.error(`Error ending stringifier`, error);
    }
    try {
      await this.uploadPromise;
    } catch (error) {
      console.error(`Error finishing upload`, error);
    }
  }
}
Python:

import io

import ndjson

class AccumulatingStream:
    def __init__(self):
        self.data = io.BytesIO()

    def write(self, item):
        self.data.write(item.encode("utf-8"))

    def get_stream(self):
        self.data.seek(0)
        return self.data

class AwsS3TargetUploadNdjson:
    def setup(self):
        # TODO: Create your S3 credential:
        # More info at https://yepcode.io/docs/integrations/aws-s3/#credential-configuration
        self.aws_s3_client = yepcode.integration.awsS3("your-s3-credential-name")
        self.acc_stream = AccumulatingStream()
        self.stringifier = ndjson.writer(self.acc_stream, ensure_ascii=False)

    def consume(self, generator, done):
        for item in generator:
            self.stringifier.writerow(item)
        done()

    def close(self):
        # TODO: customize the bucket name and object key
        try:
            self.aws_s3_client.upload_fileobj(
                self.acc_stream.get_stream(),
                "bucket-name",
                "path/to/object.ndjson",
            )
        except Exception as error:
            print(f"Error uploading object: {error}")
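The YepCode runtime wires sources and targets together for you, but conceptually the pipeline boils down to the following JavaScript sketch (hypothetical manual wiring, shown only to clarify the contract between the hooks):

  const source = new GoogleBigtableSourceGetRows();
  const target = new AwsS3TargetUploadNdjson();
  await source.init();
  await target.init();
  // fetch() publishes every BigTable row to the target's consume() hook
  // and invokes done() once all rows have been read
  await source.fetch((item) => target.consume(item), () => {});
  await source.close();
  // close() flushes the NDJSON stream and awaits the S3 upload
  await target.close();

Note the design difference between the two target variants: the JavaScript one streams items straight into a multipart S3 upload, while the Python one accumulates the whole NDJSON payload in memory and uploads it in close(), which is simpler but less suitable for very large tables.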
FAQs

What is YepCode?
YepCode is a SaaS platform that enables the creation, execution and monitoring of integrations and automations using source code in a serverless environment. We like to call it the Zapier for developers, since it brings all the agility and benefits of NoCode tools (no server provisioning, environment configuration, deployments, ...) together with the power of a programming language like JavaScript or Python.

How can these recipes help me?
These recipes are an excellent starting point for creating your own YepCode processes and solving complex integration and automation problems.

How do I create a YepCode account?
You only have to complete the sign-up form, and your account will be created on our FREE plan (no credit card required).

Is YepCode suitable for enterprise use?
YepCode has been created with a clear enterprise focus, offering a multi-tenant environment, team management capabilities, high security and auditing standards, Identity Provider (IdP) integrations, and on-premise options. It serves as the Swiss army knife for engineering teams, especially those that need to extract information from or send information to external systems, and it excels in scenarios demanding flexibility and adaptability to change.

What if the service I need is not in the integrations list?
You can still use it! You only need to configure YepCode to establish a connection with that service. Check our docs page for more information.