Slackbot
02/20/2024, 1:55 PMElijah Ben Izzy
02/20/2024, 3:15 PMRoel Bertens
02/21/2024, 6:25 PM%%cell_to_module -m my_module --display --rebuild-drivers
from pyspark import sql as ps
from pyspark.sql.column import Column
from pyspark.sql import functions as F
def working(sdf: ps.DataFrame, filter_expression: Column) -> ps.DataFrame:
return sdf.filter(filter_expression)
def not_working(sdf: ps.DataFrame, filter_expression: Column = F.lit(True)) -> ps.DataFrame:
return sdf.filter(filter_expression)
Elijah Ben Izzy
02/21/2024, 6:33 PMRoel Bertens
02/22/2024, 8:20 AMUnexpected exception formatting exception. Falling back to standard exception
Traceback (most recent call last):
File "/opt/conda/lib/python3.11/site-packages/IPython/core/interactiveshell.py", line 3526, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "/tmp/ipykernel_77042/1071666969.py", line 1, in <module>
get_ipython().run_cell_magic('cell_to_module', '-m my_module --display', '\nfrom pyspark import sql as ps \nfrom pyspark.sql.column import Column\nfrom pyspark.sql import functions as F \n\ndef working(sdf: ps.DataFrame, filter_expression: Column) -> ps.DataFrame:\n return sdf.filter(filter_expression)\n\ndef not_working(sdf: ps.DataFrame, filter_expression: Column = F.lit(True)) -> ps.DataFrame:\n return sdf.filter(filter_expression)\n\n')
File "/opt/conda/lib/python3.11/site-packages/IPython/core/interactiveshell.py", line 2493, in run_cell_magic
result = fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/hamilton/plugins/jupyter_magic.py", line 195, in cell_to_module
self.shell.ex(cell)
File "/opt/conda/lib/python3.11/site-packages/IPython/core/interactiveshell.py", line 2827, in ex
exec(cmd, self.user_global_ns, self.user_ns)
File "<string>", line 9, in <module>
File "/home/jovyan/.local/lib/python3.11/site-packages/pyspark/sql/utils.py", line 174, in wrapped
return f(*args, **kwargs)
^^^^^^^^^^^^^^^^^^
File "/home/jovyan/.local/lib/python3.11/site-packages/pyspark/sql/functions.py", line 193, in lit
return _invoke_function("lit", col)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/jovyan/.local/lib/python3.11/site-packages/pyspark/sql/functions.py", line 95, in _invoke_function
assert SparkContext._active_spark_context is not None
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AssertionError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/lib/python3.11/site-packages/IPython/core/interactiveshell.py", line 2120, in showtraceback
stb = self.InteractiveTB.structured_traceback(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 1435, in structured_traceback
return FormattedTB.structured_traceback(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 1326, in structured_traceback
return VerboseTB.structured_traceback(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 1173, in structured_traceback
formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 1063, in format_exception_as_a_whole
self.get_records(etb, number_of_lines_of_context, tb_offset) if etb else []
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 1155, in get_records
FrameInfo(
File "/opt/conda/lib/python3.11/site-packages/IPython/core/ultratb.py", line 780, in __init__
ix = inspect.getsourcelines(frame)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/inspect.py", line 1244, in getsourcelines
lines, lnum = findsource(object)
^^^^^^^^^^^^^^^^^^
File "/opt/conda/lib/python3.11/inspect.py", line 1081, in findsource
raise OSError('could not get source code')
OSError: could not get source code
Elijah Ben Izzy
02/22/2024, 3:07 PMElijah Ben Izzy
02/22/2024, 3:36 PMfrom pyspark.sql import functions as F, SparkSession
spark = SparkSession.builder.getOrCreate()
F.lit(True)
But this doesn’t:
from pyspark.sql import functions as F, SparkSession
F.lit(True)
Roel Bertens
02/22/2024, 4:16 PMElijah Ben Izzy
02/22/2024, 4:18 PMRoel Bertens
02/23/2024, 9:07 AM