# Spark configuration for Azure AD credential passthrough: the ABFS driver
# obtains tokens through the cluster's passthrough token provider class.
configs = {
  "fs.azure.account.auth.type": "CustomAccessToken",
  "fs.azure.account.custom.token.provider.class": spark.conf.get("spark.databricks.passthrough.adls.gen2.tokenProviderClassName")
}
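These settings are consumed by the dbutils.fs.mount call later in the walkthrough. On a cluster with credential passthrough enabled, the direct abfss:// reads and writes below need no per-account credentials.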
spark.conf.set("fs.azure.createRemoteFileSystemDuringInitialization", "true")
dbutils.fs.ls("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/")
spark.conf.set("fs.azure.createRemoteFileSystemDuringInitialization", "false")
df = spark.read.json("dbfs:/databricks-datasets/iot/iot_devices.json")
df.write.json("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/iot_devices.json")
df2 = spark.read.json("abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/iot_devices.json")
display(df2)
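Both the write and the read back run under your own Azure AD identity, so they succeed only if that identity has been granted access to the storage account (for example, through a Storage Blob Data Contributor role assignment).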
# Mount the container under /mnt/<mount-name>, using the passthrough configs defined above.
dbutils.fs.mount(
  source = "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/",
  mount_point = "/mnt/<mount-name>",
  extra_configs = configs)
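To verify the mount, one option (not part of the original walkthrough) is to scan the list returned by dbutils.fs.mounts():

# Optional sanity check: confirm the mount point exists and where it points.
for m in dbutils.fs.mounts():
  if m.mountPoint == "/mnt/<mount-name>":
    print(m.mountPoint, "->", m.source)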
dbutils.fs.ls("/mnt/<mount-name>")
df2 = spark.read.json("/mnt/<mount-name>/iot_devices.json")
%sql
-- Create a database whose default location is the mounted container.
CREATE DATABASE <db-name>
LOCATION "/mnt/<mount-name>"
# Save the DataFrame as a managed table.
df.write.saveAsTable("<name>")
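Note that saveAsTable targets the current database; run USE <db-name> first, or use a qualified name such as <db-name>.<name>, if the table should land in the database created above.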
%sql
SELECT * FROM <name> LIMIT 10

%sql
DROP TABLE <name>
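The same statements can also be issued from Python instead of a SQL cell; a minimal equivalent using the same placeholder table name:

spark.sql("SELECT * FROM <name> LIMIT 10").show()
spark.sql("DROP TABLE <name>")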
# Clean up: remove the mount point (the data in the container is unaffected).
dbutils.fs.unmount("/mnt/<mount-name>")