Mohsen Akhavan
Mohsen Akhavan

Reputation: 75

Save URL params as CSV file with Python and Azure Function

I want to send some data via HTTP POST, like this:

https://httptrigger-testfunction.azurewebsites.net/api/HttpTrigger1?id=test&serial_id=1254&device_tra=302&received_time=2021-03-01

I wrote an Azure function based on the Microsoft sample from here that reads "name" from an HTTP POST. Now I want to read the above data and save it to a CSV file on blob storage. Which module should I use?

Sample code:

import logging

import azure.functions as func


def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP trigger entry point: greet the caller by name.

    The name is taken from the query string; when absent there, the JSON
    request body is consulted instead.
    """
    logging.info('Python HTTP trigger function processed a request.')

    name = req.params.get('name')
    if not name:
        # Fall back to the JSON body; a non-JSON body is simply ignored.
        try:
            payload = req.get_json()
        except ValueError:
            pass
        else:
            name = payload.get('name')

    if not name:
        return func.HttpResponse(
             "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
             status_code=200
        )
    return func.HttpResponse(f"Hello, {name}. This HTTP triggered function executed successfully.")

Upvotes: 0

Views: 687

Answers (2)

Frank Borzage
Frank Borzage

Reputation: 6816

Please refer to my code:

import logging
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
import azure.functions as func
import os, uuid
import tempfile


def main(req: func.HttpRequest) -> func.HttpResponse:
    """Read ``id``, ``serial_id``, ``device_tra`` and ``received_time`` from
    the request (query string first, then JSON body), write them as one CSV
    line to a temp file, and upload that file to Azure blob storage.

    Returns a greeting mentioning ``id`` when it is present, otherwise a
    generic success message.
    """
    logging.info('Python HTTP trigger function processed a request.')

    connect_str = "<your-connection-string>"
    container_name = "<your-container-name>"

    def _get_value(key):
        """Return the request value for *key*: query string, then JSON body."""
        value = req.params.get(key)
        if not value:
            try:
                req_body = req.get_json()
            except ValueError:
                # Body absent or not valid JSON; leave the value as-is.
                pass
            else:
                value = req_body.get(key)
        return value

    # `record_id` instead of `id` to avoid shadowing the builtin.
    record_id = _get_value('id')
    serial_id = _get_value('serial_id')
    device_tra = _get_value('device_tra')
    received_time = _get_value('received_time')

    # Create the BlobServiceClient object used to talk to the storage account.
    blob_service_client = BlobServiceClient.from_connection_string(connect_str)

    # Build the CSV file under a unique name in the function's temp directory
    # (the only writable location in the Functions sandbox).
    local_file_name = str(uuid.uuid4()) + ".csv"
    upload_file_path = os.path.join(tempfile.gettempdir(), local_file_name)
    logging.info(upload_file_path)

    # Missing parameters become empty fields; concatenating None with str
    # would raise TypeError.
    values = [record_id, serial_id, device_tra, received_time]
    csv_content = ",".join("" if v is None else str(v) for v in values)
    logging.info(csv_content)

    # `with` guarantees the file is closed even if the write fails.
    with open(upload_file_path, 'w') as csv_file:
        csv_file.write(csv_content)

    # Create a blob client using the local file name as the name for the blob.
    blob_client = blob_service_client.get_blob_client(container=container_name, blob=local_file_name)

    # Use logging (not print) so the message reaches the Functions host logs.
    logging.info("Uploading to Azure Storage as blob: %s", local_file_name)

    # Upload the created file.
    with open(upload_file_path, "rb") as data:
        blob_client.upload_blob(data)

    if record_id:
        return func.HttpResponse(f"Hello, {record_id}. This HTTP triggered function executed successfully.")
    else:
        return func.HttpResponse(
             "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
             status_code=200
        )

Or you can use this code:

import logging
from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient
import azure.functions as func
import os, uuid
import tempfile
import csv


def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    connect_str = "<your-connection-string>"
    container_name = "<your-container-name>"

    id = req.params.get('id')
    if not id:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            id = req_body.get('id')
    
    serial_id = req.params.get('serial_id')
    if not serial_id:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            serial_id = req_body.get('serial_id')

    device_tra = req.params.get('device_tra')
    if not device_tra:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            device_tra = req_body.get('device_tra')

    received_time = req.params.get('received_time')
    if not received_time:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            received_time = req_body.get('received_time')

    # Create the BlobServiceClient object which will be used to create a container client
    blob_service_client = BlobServiceClient.from_connection_string(connect_str)

    # Create the container
    container_client = blob_service_client.get_container_client(container_name)

    # Create a local directory to hold blob data
    local_path = tempfile.gettempdir()

    # Create a file in the local data directory to upload and download
    local_file_name = str(uuid.uuid4()) + ".csv"
    upload_file_path = os.path.join(local_path, local_file_name)
    logging.info(upload_file_path)

    with open(upload_file_path, 'w', newline='') as csvfile:
        filewriter = csv.writer(csvfile, delimiter=',',
                                quotechar='|', quoting=csv.QUOTE_MINIMAL)
        filewriter.writerow(['id', 'serial_id', 'device_tra', 'received_time'])
        filewriter.writerow([id, serial_id, device_tra, received_time])

    # Create a blob client using the local file name as the name for the blob
    blob_client = blob_service_client.get_blob_client(container=container_name, blob=local_file_name)

    print("\nUploading to Azure Storage as blob:\n\t" + local_file_name)

    # Upload the created file
    with open(upload_file_path, "rb") as data:
        blob_client.upload_blob(data)

    if id:
        return func.HttpResponse(f"Hello, {id}. This HTTP triggered function executed successfully.")
    else:
        return func.HttpResponse(
             "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
             status_code=200
        )

Upvotes: 1

Kashyap
Kashyap

Reputation: 17546

You can use the storage account Python SDK. This one is for ADLS Gen 2; if you are using Gen 1 or older, find the right SDK here.

Look at the "uploading a file" section; it shows how to write string data to a blob. E.g., in the code below you would put your CSV contents in the variable data.

from azure.storage.filedatalake import DataLakeFileClient

# Bytes payload to write to the lake file (your CSV contents would go here).
data = b"abc"

# Build a client for the target file from the account connection string.
file = DataLakeFileClient.from_connection_string(
    "my_connection_string",
    file_system_name="myfilesystem",
    file_path="myfile",
)

# Append the bytes at offset 0, then flush to commit the written length.
size = len(data)
file.append_data(data, offset=0, length=size)
file.flush_data(size)

Some more samples here.

You'll want to use a CSV writer that writes to an in-memory string (perhaps StringIO) instead of a local file, and then write that string to ADLS.


In case it's not obvious from the code and samples: you use req.params to access the query-string parameters. Nothing except the body has a dedicated getter.

Upvotes: 0

Related Questions