Description
Describe the bug
Importing `notebookutils` (e.g. `from notebookutils import mssparkutils`) fails with `ModuleNotFoundError: No module named 'azure.core.exceptions'` when semantic-link-labs is added as a public library to a Fabric environment, because the transitive import chain (notebookutils → pandas patch → fsspec_wrapper → adlfs → azure.core) cannot be resolved.
To Reproduce
Steps to reproduce the behavior:
- Add semantic-link-labs as public lib in env
- Attach lib to the Fabric notebook
- Execute notebookutils.nbResPath.
- See error
Expected behavior
I expect to be able to import and use `notebookutils` (e.g. `notebookutils.nbResPath`) without errors when semantic-link-labs is attached to the environment.
Screenshots
Exception message:
ModuleNotFoundError Traceback (most recent call last)
Cell In[16], line 1
----> 1 from notebookutils import mssparkutils
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/__init__.py:5
1 __version__ = "1.1.5"
3 __all__ = ["cognitiveService", "data", "fs", "lakehouse", "notebook", "session", "runtime", "help", "warehouse", "workspace", "fabricClient", "credentials", "PBIClient", "udf"]
----> 5 from . import cognitiveService, data, fs, lakehouse, notebook, session, warehouse, workspace, fabricClient, credentials, PBIClient, udf
7 # TODO: this line should be removed after update runner ipynb code
8 from notebookutils.visualization import displayHTML
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/cognitiveService.py:1
----> 1 from .mssparkutils.handlers import CognitiveServcieHandler
3 cs = CognitiveServcieHandler()
5 def getEndpointAndKey(lsName):
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/mssparkutils/__init__.py:5
2 from os.path import dirname, basename, isfile, join
3 import glob
----> 5 from .handlers import RuntimeHandler
6 from ..common.logger import deprecated, print_deprecated_message
8 modules = glob.glob(join(dirname(__file__), "*.py"))
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/mssparkutils/handlers/__init__.py:1
----> 1 from .fsHandler import SynapseFSHandler
2 from .notebookHandler import SynapseNotebookHandler
3 from .runtimeHandler import RuntimeHandler
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/mssparkutils/handlers/fsHandler.py:2
1 from six import string_types
----> 2 from notebookutils.visualization import displayHTML, display_mount_points
3 from notebookutils.common.logger import log4jLogger
5 from .baseHandler import SynapseBaseHandler
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/visualization/__init__.py:1
----> 1 from .display import display, display_mount_points
2 from .displayHTML import displayHTML
3 from .msInlinePlotlib import enable_msinline_backend as enableMatplotlib
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/visualization/display.py:17
14 from notebookutils.ipythoninterpreter import is_ipython_enabled
15 from notebookutils.ipython import runtime
---> 17 import notebookutils.visualization.display_jupyter as display_jupyter
18 from notebookutils.visualization.constants import SYNAPSE_DISPLAY_WIDGET_TYPE_KEY, MAX_ROW_COUNT
19 from notebookutils.visualization.utils import sparkContextHelper
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/visualization/display_jupyter.py:11
8 from notebookutils.common.logger import log4jLogger
9 from notebookutils.visualization.constants import JUPYTER_DISPLAY_OUTPUT_TYPE_KEY, SYNAPSE_DISPLAY_WIDGET_TYPE_KEY,
10 MAX_ROW_COUNT, MAX_CONTENT_LENGTH
---> 11 from notebookutils.visualization.dataWrangler import get_wrangler_display_entry_context, WRANGLER_ENTRY_CONTEXT_KEY
13 # Map to the unified type set that will be consumed by client side
14 # We use Spark data type .simpleString() as the unified type set
15 # Some of the types are not common Pandas dtype, like boolean/uint64. But it may appear in some DataFrame generated
16 # by 3rd party library like sempy.
17 _pandas_type_mapping = {
18 'datetime64[ns]': 'timestamp',
19 'int8': 'tinyint',
(...)
31 'boolean': 'boolean'
32 }
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/notebookutils/visualization/dataWrangler.py:3
1 from notebookutils.common.logger import log4jLogger
2 import sys
----> 3 import pandas as pd
4 import pyspark
6 WRANGLER_ENTRY_CONTEXT_KEY = "wranglerEntryContext"
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/pandas/__init__.py:236
195 __doc__ = """
196 pandas - a powerful data analysis and manipulation library for Python
197 =====================================================================
(...)
232 conversion, moving window statistics, date shifting and lagging.
233 """
235 from fsspec.registry import register_implementation
--> 236 from fsspec_wrapper.trident.core import OnelakeFileSystem
237 register_implementation('abfs', OnelakeFileSystem)
238 register_implementation('abfss', OnelakeFileSystem)
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/fsspec_wrapper/__init__.py:1
----> 1 from .core import AzureBlobFileSystem
2 from .trident.core import OnelakeFileSystem
3 from .version import VERSION as __version__  # noqa
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/fsspec_wrapper/core.py:5
2 import time
3 from urllib.parse import urlsplit
----> 5 import adlfs
6 from fsspec.utils import infer_storage_options
8 from .utils import logger as synapseml_pandas_logger
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/adlfs/__init__.py:2
1 from .gen1 import AzureDatalakeFileSystem
----> 2 from .spec import AzureBlobFile, AzureBlobFileSystem
4 __all__ = ["AzureBlobFileSystem", "AzureBlobFile", "AzureDatalakeFileSystem"]
6 try:
File ~/cluster-env/clonedenv/lib/python3.11/site-packages/adlfs/spec.py:20
17 from glob import has_magic
18 from typing import Optional, Tuple
---> 20 from azure.core.exceptions import (
21 HttpResponseError,
22 ResourceExistsError,
23 ResourceNotFoundError,
24 )
25 from azure.storage.blob import (
26 BlobBlock,
27 BlobProperties,
(...)
30 generate_blob_sas,
31 )
32 from azure.storage.blob.aio import BlobPrefix
ModuleNotFoundError: No module named 'azure.core.exceptions'