Reputation: 33
I want to copy 2 tables from blob storage to a SQL Database. I created a pipeline like this:
- Get Metadata: captures the files (2 CSV files) in the input container.
- ForEach: iterates over the files in the input container.
- Copy activity: inside the ForEach; copies both files into the SQL database.
Now, when I started debugging, I got error 2200, saying the user blob does not exist (UserErrorSourceBlobNotExist).
Here is the error output of the copy activity:
"copyDuration": 3,
"errors": [
{
"Code": 9013,
"Message": "ErrorCode=UserErrorSourceBlobNotExist,'Type=Microsoft.DataTransfer.Common.Shared.HybridDeliveryException,Message=The required Blob is missing. ContainerName: https://employeestorage1.blob.core.windows.net/employeeinput, path: employeeinput/workdetail.csv.,Source=Microsoft.DataTransfer.ClientLibrary,'",
"EventType": 0,
"Category": 5,
"Data": {},
"MsgId": null,
"ExceptionType": null,
"Source": null,
"StackTrace": null,
"InnerEventInfos": []
}
],
"effectiveIntegrationRuntime": "DefaultIntegrationRuntime (East US)",
"usedDataIntegrationUnits": 4,
"billingReference": {
"activityType": "DataMovement",
"billableDuration": [
{
"meterType": "AzureIR",
"duration": 0.06666666666666667,
"unit": "DIUHours"
}
]
},
"usedParallelCopies": 1,
"executionDetails": [
{
"source": {
"type": "AzureBlobStorage",
"region": "East US"
},
"sink": {
"type": "AzureSqlDatabase",
"region": "East US"
},
"status": "Failed",
"start": "2021-06-24T17:28:09.4507134Z",
"duration": 3,
"usedDataIntegrationUnits": 4,
"usedParallelCopies": 1,
"profile": {
"queue": {
"status": "Completed",
"duration": 2
},
"transfer": {
"status": "Completed",
"duration": 0
}
},
"detailedDurations": {
"queuingDuration": 2,
"transferDuration": 0
}
}
],
"dataConsistencyVerification": {
"VerificationResult": "Unsupported"
},
"durationInQueue": {
"integrationRuntimeQueue": 0
}
}
And here is the JSON for the pipeline:
{
    "name": "pipeline1",
    "properties": {
        "activities": [
            {
                "name": "inputfolder",
                "type": "GetMetadata",
                "dependsOn": [],
                "policy": {
                    "timeout": "7.00:00:00",
                    "retry": 0,
                    "retryIntervalInSeconds": 30,
                    "secureOutput": false,
                    "secureInput": false
                },
                "userProperties": [],
                "typeProperties": {
                    "dataset": {
                        "referenceName": "employeeinputdataset",
                        "type": "DatasetReference"
                    },
                    "fieldList": [
                        "childItems"
                    ],
                    "storeSettings": {
                        "type": "AzureBlobStorageReadSettings",
                        "enablePartitionDiscovery": false
                    },
                    "formatSettings": {
                        "type": "DelimitedTextReadSettings"
                    }
                }
            },
            {
                "name": "for each table in input folder",
                "type": "ForEach",
                "dependsOn": [
                    {
                        "activity": "inputfolder",
                        "dependencyConditions": [
                            "Succeeded"
                        ]
                    }
                ],
                "userProperties": [],
                "typeProperties": {
                    "items": {
                        "value": "@activity('inputfolder').output.Childitems",
                        "type": "Expression"
                    },
                    "activities": [
                        {
                            "name": "Copy data1",
                            "type": "Copy",
                            "dependsOn": [],
                            "policy": {
                                "timeout": "7.00:00:00",
                                "retry": 0,
                                "retryIntervalInSeconds": 30,
                                "secureOutput": false,
                                "secureInput": false
                            },
                            "userProperties": [],
                            "typeProperties": {
                                "source": {
                                    "type": "DelimitedTextSource",
                                    "storeSettings": {
                                        "type": "AzureBlobStorageReadSettings",
                                        "recursive": true,
                                        "wildcardFolderPath": "employeeinput",
                                        "wildcardFileName": {
                                            "value": "@item().name",
                                            "type": "Expression"
                                        },
                                        "enablePartitionDiscovery": false
                                    },
                                    "formatSettings": {
                                        "type": "DelimitedTextReadSettings"
                                    }
                                },
                                "sink": {
                                    "type": "AzureSqlSink",
                                    "tableOption": "autoCreate",
                                    "disableMetricsCollection": false
                                },
                                "enableStaging": false,
                                "translator": {
                                    "type": "TabularTranslator",
                                    "typeConversion": true,
                                    "typeConversionSettings": {
                                        "allowDataTruncation": true,
                                        "treatBooleanAsNumber": false
                                    }
                                }
                            },
                            "inputs": [
                                {
                                    "referenceName": "employeeinputdataset",
                                    "type": "DatasetReference"
                                }
                            ],
                            "outputs": [
                                {
                                    "referenceName": "employeeoutputsql",
                                    "type": "DatasetReference",
                                    "parameters": {
                                        "OutputTableName": {
                                            "value": "@item().name",
                                            "type": "Expression"
                                        }
                                    }
                                }
                            ]
                        }
                    ]
                }
            }
        ],
        "annotations": []
    }
}
Upvotes: 0
Views: 3589
Reputation: 8690
Don't choose Wildcard file path as the File path type setting; choose File path in dataset instead.
You also need to create a parameter on your source dataset and reference it in the dataset's File path with the @dataset().fileName expression.
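A minimal sketch of what that parameterized source dataset could look like, assuming a DelimitedText dataset named employeeinputdataset pointing at the employeeinput container; the linked service name (AzureBlobStorage1), the parameter name fileName, the comma delimiter, and the header flag are illustrative assumptions, not taken from the question:
{
    "name": "employeeinputdataset",
    "properties": {
        "linkedServiceName": {
            "referenceName": "AzureBlobStorage1",
            "type": "LinkedServiceReference"
        },
        "parameters": {
            "fileName": {
                "type": "string"
            }
        },
        "type": "DelimitedText",
        "typeProperties": {
            "location": {
                "type": "AzureBlobStorageLocation",
                "fileName": {
                    "value": "@dataset().fileName",
                    "type": "Expression"
                },
                "container": "employeeinput"
            },
            "columnDelimiter": ",",
            "firstRowAsHeader": true
        }
    }
}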
Finally, pass @item().name from the ForEach to that dataset parameter in the copy activity.
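Applied to the pipeline in the question, the copy activity's source can then drop the wildcard settings, and its inputs section forwards the file name to the dataset parameter. A sketch of just those two fragments (assuming the fileName parameter from the dataset sketch above):
"source": {
    "type": "DelimitedTextSource",
    "storeSettings": {
        "type": "AzureBlobStorageReadSettings",
        "enablePartitionDiscovery": false
    },
    "formatSettings": {
        "type": "DelimitedTextReadSettings"
    }
}

"inputs": [
    {
        "referenceName": "employeeinputdataset",
        "type": "DatasetReference",
        "parameters": {
            "fileName": {
                "value": "@item().name",
                "type": "Expression"
            }
        }
    }
]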
Upvotes: 1