# Authenticate to the storage account with an access key retrieved from a Databricks secret scope
spark.conf.set(
    "fs.azure.account.key.<storage-account-name>.dfs.core.windows.net",
    dbutils.secrets.get(scope="<scope-name>", key="<storage-account-access-key-name>"))
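
# Allow the ABFS driver to create the container (file system) on first access if it does not already exist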
spark.conf.set("fs.azure.createRemoteFileSystemDuringInitialization", "true")
dbutils.fs.ls("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/")
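# Turn automatic file system creation back off once the container exists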
spark.conf.set("fs.azure.createRemoteFileSystemDuringInitialization", "false")
df = spark.read.json("dbfs:/databricks-datasets/iot/iot_devices.json")
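# Write the DataFrame out to the container as JSON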
df.write.json("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/iot_devices.json")
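# Read the data back from the container to verify the round trip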
df2 = spark.read.json("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/iot_devices.json")
display(df2)
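
# Optional cleanup, not part of the original snippet: a sketch that deletes the example
# output once you have verified the round trip. dbutils.fs.rm with the second argument
# set to True removes the whole directory that Spark wrote for the JSON output.
dbutils.fs.rm("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/iot_devices.json", True)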