// OAuth 2.0 client-credential settings for the Azure AD service principal
val configs = Map(
  "dfs.adls.oauth2.access.token.provider.type" -> "ClientCredential",
  "dfs.adls.oauth2.client.id" -> "<aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee>",
  "dfs.adls.oauth2.credential" -> "<NzQzY2QzYTAtM2I3Zi00NzFmLWI3MGMtMzc4MzRjZmk=>",
  "dfs.adls.oauth2.refresh.url" -> "https://login.microsoftonline.com/<ffffffff-gggg-hhhh-iiii-jjjjjjjjjjjj>/oauth2/token")

// Mount the ADLS store at /mnt/kp-adls using the credentials above
dbutils.fs.mount(
  source = "adl://kpadls.azuredatalakestore.net/",
  mountPoint = "/mnt/kp-adls",
  extraConfigs = configs)
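Once the mount succeeds, the store behaves like any other DBFS path. A minimal sketch of reading from it, assuming a hypothetical CSV file under the mount (the file path and header option are illustrative, not from the original notebook):

// Read a CSV from the mounted store; the path below is a placeholder
val df = spark.read
  .option("header", "true")
  .csv("/mnt/kp-adls/<path-to-file>.csv")
display(df)

To detach the store later, dbutils.fs.unmount("/mnt/kp-adls") removes the mount point.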
// Alternative: set the credentials directly on the Spark session instead of mounting
spark.conf.set("dfs.adls.oauth2.access.token.provider.type", "ClientCredential")
spark.conf.set("dfs.adls.oauth2.client.id", "<aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee>")
spark.conf.set("dfs.adls.oauth2.credential", "<NzQzY2QzYTAtM2I3Zi00NzFmLWI3MGMtMzc4MzRjZmk=>")
spark.conf.set("dfs.adls.oauth2.refresh.url", "https://login.microsoftonline.com/<ffffffff-gggg-hhhh-iiii-jjjjjjjjjjjj>/oauth2/token")
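With session-scoped credentials set this way, the store can be addressed directly by its adl:// URI, with no mount involved. A sketch, again with a placeholder path for a hypothetical Parquet dataset:

// Read directly from ADLS via the adl:// scheme; the path is a placeholder
val df = spark.read.parquet("adl://kpadls.azuredatalakestore.net/<path-to-dataset>")
df.printSchema()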
This example notebook closely follows the Databricks documentation on setting up Azure Data Lake Store as a data source in Databricks.