Workflow saved

This commit is contained in:
unknown
2025-08-29 09:14:36 +00:00
parent 1b840c8730
commit 2e42932fab
3 changed files with 56 additions and 1 deletions

View File

@@ -452,3 +452,29 @@ LEFT JOIN high_valued_payments___df c
""")
success_payment_metrics_df.createOrReplaceTempView('success_payment_metrics_df')
# %%
# Upsert success_payment_metrics_df into dremio.successpaymentmetrics.
# Rows are matched on the unique key (payment_date): matched rows get their
# non-key columns updated, unmatched rows are inserted wholesale (INSERT *).
_success_payment_metrics_writer_unique_keys = ['payment_date']
_success_payment_metrics_writer_fields_to_update = success_payment_metrics_df.columns
_success_payment_metrics_writer_unique_key_clause = [
    f't.{_key} = s.{_key}' for _key in _success_payment_metrics_writer_unique_keys
]
# BUG FIX: the original tested each field name against the *rendered* join
# clauses ('t.payment_date = s.payment_date'), so the membership test never
# matched and key columns leaked into the UPDATE SET list. Compare against
# the key names themselves.
_success_payment_metrics_writer_set_clause = [
    f't.{_field} = s.{_field}'
    for _field in _success_payment_metrics_writer_fields_to_update
    if _field not in _success_payment_metrics_writer_unique_keys
]
_merge_query = (
    'MERGE INTO dremio.successpaymentmetrics t\n'
    'USING success_payment_metrics_df s\n'
    'ON ' + ' AND '.join(_success_payment_metrics_writer_unique_key_clause)
    + ' WHEN MATCHED THEN UPDATE SET '
    + ', '.join(_success_payment_metrics_writer_set_clause)
    + ' WHEN NOT MATCHED THEN INSERT *'
)
spark.sql(_merge_query)

View File

@@ -546,6 +546,35 @@ def success_payment_metrics(
""")
success_payment_metrics_df.createOrReplaceTempView('success_payment_metrics_df')
return (success_payment_metrics_df,)
@app.cell
def success_payment_metrics_writer(spark, success_payment_metrics_df):
    """Upsert success_payment_metrics_df into dremio.successpaymentmetrics.

    Rows are matched on the unique key (payment_date): matched rows have
    their non-key columns updated, unmatched rows are inserted wholesale
    (INSERT *). Underscore-prefixed names stay local to this marimo cell.
    """
    _unique_keys = ['payment_date']
    _fields_to_update = success_payment_metrics_df.columns
    _unique_key_clause = [f't.{_key} = s.{_key}' for _key in _unique_keys]
    # BUG FIX: the original tested each field name against the *rendered*
    # join clauses ('t.payment_date = s.payment_date'), so the membership
    # test never matched and key columns leaked into the UPDATE SET list.
    # Compare against the key names themselves.
    _set_clause = [
        f't.{_field} = s.{_field}'
        for _field in _fields_to_update
        if _field not in _unique_keys
    ]
    _merge_query = (
        'MERGE INTO dremio.successpaymentmetrics t\n'
        'USING success_payment_metrics_df s\n'
        'ON ' + ' AND '.join(_unique_key_clause)
        + ' WHEN MATCHED THEN UPDATE SET ' + ', '.join(_set_clause)
        + ' WHEN NOT MATCHED THEN INSERT *'
    )
    spark.sql(_merge_query)
    return

File diff suppressed because one or more lines are too long