Hi @Martin Dráb,

I've extended the Electronic Reporting (ER) import source settings so that, instead of a simple file system folder, files are fetched from Azure Blob Storage. I've referred to Implement a custom ER source of inbound documents - Finance & Operations | Dynamics 365 | Microsoft Learn and D365F&O. File based integration using Azure Blob Storage. However, under Electronic reporting source > File states for the source, the log shows that no file was fetched from the blob container; instead I keep getting this error:

"The input is not a valid Base-64 string as it contains a non-base 64 character, more than two padding characters, or an illegal character among the padding characters."
using Microsoft.Dynamics365.LocalizationFramework;
using System.IO;
using Microsoft.Azure.Storage;
using Microsoft.Azure.Storage.Auth;
using Microsoft.Azure.Storage.Blob;

/// <summary>
/// ER file source backed by a folder in an Azure Blob Storage container.
/// </summary>
public class YS_InboundFileSource implements ERIFileSource
{
    private str folderPath;

    /// <summary>
    /// Creates a new instance of <c>YS_InboundFileSource</c>.
    /// </summary>
    /// <param name = "_folderPath">A folder (virtual directory) path inside the blob container.</param>
    protected void new(str _folderPath)
    {
        folderPath = _folderPath;
    }

    /// <summary>
    /// Constructs a new instance of <c>YS_InboundFileSource</c>.
    /// </summary>
    /// <param name = "_folderPath">A folder (virtual directory) path inside the blob container.</param>
    /// <returns>A new instance of the class.</returns>
    internal static YS_InboundFileSource construct(str _folderPath)
    {
        return new YS_InboundFileSource(_folderPath);
    }

    public str parmFolderPath(str _value = folderPath)
    {
        folderPath = _value;
        return folderPath;
    }

    [Hookable(false)]
    public ERIFiles GetFiles()
    {
        var ret = new ERFiles();

        // Account name and key are placeholders here; the real values come from the storage account.
        StorageCredentials storageCredentials = new StorageCredentials("storageaccount", "accountkey");
        CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(storageCredentials, true);
        CloudBlobClient cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
        CloudBlobContainer cloudBlobContainer = cloudBlobClient.GetContainerReference("container");
        CloudBlobDirectory cloudBlobDirectory = cloudBlobContainer.GetDirectoryReference(folderPath);

        // Flat listing so that blobs in nested virtual directories are returned as well.
        var listOfBlobs = cloudBlobDirectory.ListBlobs(true, BlobListingDetails::None, null, null);
        var enumerator = listOfBlobs.GetEnumerator();

        while (enumerator.MoveNext())
        {
            CloudBlockBlob cloudBlockBlob = enumerator.get_Current() as CloudBlockBlob;

            if (cloudBlockBlob != null)
            {
                // Skip blobs in the Archive tier; they cannot be downloaded without rehydration.
                str blobTier = cloudBlockBlob.Properties.StandardBlobTier.ToString();

                if (blobTier != "Archive")
                {
                    System.IO.MemoryStream memStream = new System.IO.MemoryStream();
                    cloudBlockBlob.DownloadToStream(memStream, null, null, null);
                    memStream.Seek(0, System.IO.SeekOrigin::Begin);

                    ERIFile file = YS_InboundFile::construct(
                        cloudBlockBlob.Name,
                        cloudBlockBlob.Name,
                        DateTimeUtil::getSystemDateTime(),
                        folderPath,
                        memStream);

                    ret.Add(file);
                }
            }
        }

        return ret;
    }
}
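For completeness, a stand-alone runnable class along these lines (the class name and the "inbound" folder path are just placeholders) would let me call GetFiles() outside the ER framework and see whether the same exception is raised there:

/// <summary>
/// Hypothetical smoke test; only checks whether <c>GetFiles</c> throws the
/// same Base-64 exception when called outside the ER framework.
/// </summary>
internal final class YS_InboundFileSourceSmokeTest
{
    public static void main(Args _args)
    {
        YS_InboundFileSource source = YS_InboundFileSource::construct("inbound");

        try
        {
            source.GetFiles();
            info("GetFiles() completed without throwing.");
        }
        catch (Exception::CLRError)
        {
            error(CLRInterop::getLastException().ToString());
        }
    }
}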