Skip to content

RUNME fails on serverless and single-user cluster #2

@nathanknox

Description

@nathanknox

May be a known issue, but I'm trying to be more diligent about engaging and opening issues where I find them.

When run on serverless, it fails on cmd 3: from solacc.companion import NotebookSolutionCompanion:

[[JVM_ATTRIBUTE_NOT_SUPPORTED](https://docs.microsoft.com/azure/databricks/error-messages/error-classes#jvm_attribute_not_supported)] Directly accessing the underlying Spark driver JVM using the attribute 'sparkContext' is not supported on serverless compute. If you require direct access to these fields, consider using a single-user cluster. For more details on compatibility and limitations, check: https://learn.microsoft.com/azure/databricks/release-notes/serverless#limitations

When run on a single-user cluster, I'm seeing the same cell (cmd 3) fail with this error message:

TypeError: 'JavaPackage' object is not callable
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/__init__.py:4, in validate_dependencies()
      2 try:
      3     # noinspection PyUnresolvedReferences,PyUnboundLocalVariable
----> 4     assert validate_dependencies_already
      5 except NameError:
UnboundLocalError: cannot access local variable 'validate_dependencies_already' where it is not associated with a value

During handling of the above exception, another exception occurred:
TypeError                                 Traceback (most recent call last)
File <command-5354328770069829>, line 1
----> 1 from solacc.companion import NotebookSolutionCompanion
File /databricks/python_shell/lib/dbruntime/autoreload/discoverability/hook.py:71, in AutoreloadDiscoverabilityHook._patched_import(self, name, *args, **kwargs)
     65 if not self._should_hint and (
     66     (module := sys.modules.get(absolute_name)) is not None and
     67     (fname := get_allowed_file_name_or_none(module)) is not None and
     68     (mtime := os.stat(fname).st_mtime) > self.last_mtime_by_modname.get(
     69         absolute_name, float("inf")) and not self._should_hint):
     70     self._should_hint = True
---> 71 module = self._original_builtins_import(name, *args, **kwargs)
     72 if (fname := fname or get_allowed_file_name_or_none(module)) is not None:
     73     mtime = mtime or os.stat(fname).st_mtime
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/solacc/companion/__init__.py:2
      1 # Databricks notebook source
----> 2 from dbacademy.dbrest import DBAcademyRestClient
      3 from dbruntime.display import displayHTML
      4 from databricks.sdk import WorkspaceClient
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/__init__.py:11
      7         dbgems.validate_dependencies("dbacademy")
      8         validate_dependencies_already = True
---> 11 validate_dependencies()
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/__init__.py:7, in validate_dependencies()
      5 except NameError:
      6     from dbacademy import dbgems
----> 7     dbgems.validate_dependencies("dbacademy")
      8     validate_dependencies_already = True
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:187, in validate_dependencies(module, curriculum_workspaces_only)
    185 def validate_dependencies(module: str, curriculum_workspaces_only=True) -> bool:
    186     # Don't do anything unless this is in one of the Curriculum Workspaces
--> 187     testable = curriculum_workspaces_only is False or is_curriculum_workspace()
    188     try:
    189         if testable:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:181, in is_curriculum_workspace()
    180 def is_curriculum_workspace() -> bool:
--> 181     host_name = get_browser_host_name(default_value="unknown")
    182     return host_name.startswith("curriculum-") and host_name.endswith(".cloud.databricks.com")
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:101, in get_browser_host_name(default_value)
    100 def get_browser_host_name(default_value=None):
--> 101     return get_tag(tag_name="browserHostName", default_value=default_value)
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:93, in get_tag(tag_name, default_value)
     91     return default_value
     92 else:
---> 93     raise e
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:87, in get_tag(tag_name, default_value)
     85 def get_tag(tag_name: str, default_value: str = None) -> str:
     86     try:
---> 87         value = get_tags().get(tag_name)
     88         return value or default_value
     89     except Exception as e:
File /local_disk0/.ephemeral_nfs/envs/pythonEnv-0b5cd1ff-d1be-4d3b-b561-12cae5388640/lib/python3.12/site-packages/dbacademy/dbgems/__init__.py:81, in get_tags()
     79 tags = dbutils.entry_point.getDbutils().notebook().getContext().tags()
     80 # noinspection PyProtectedMember,PyUnresolvedReferences
---> 81 java_map = sc._jvm.scala.collection.JavaConversions.mapAsJavaMap(tags)
     82 return java_map

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions