Workflow saved

unknown
2025-08-29 09:00:50 +00:00
parent 0dcde9e5cb
commit ab9c61fddd
3 changed files with 60 additions and 1 deletion


@@ -204,3 +204,31 @@ failed_payments_update_mapper_df.createOrReplaceTempView("failed_payments_update
print(payment_api_df.columns)
success_payment_filter_df = spark.sql("select * from payment_api_df where response_body.success=\'true\'")
success_payment_filter_df.createOrReplaceTempView('success_payment_filter_df')
# %%
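# Build the SELECT list for the success-payments mapper: a generated UUID id,
# pass-through payment fields, created_at/updated_at timestamps formatted as
# yyyy-MM-dd'T'HH:mm:ss.SSS, and the amount cast to DECIMAL(10,2).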
_success_payments_mapper_select_clause=success_payment_filter_df.columns if False else []
_success_payments_mapper_select_clause.append("uuid() AS id")
_success_payments_mapper_select_clause.append("account_id AS account_id")
_success_payments_mapper_select_clause.append("currency AS currency")
_success_payments_mapper_select_clause.append("payment_id AS payment_id")
_success_payments_mapper_select_clause.append("gateway AS gateway")
_success_payments_mapper_select_clause.append("payment_method AS payment_method")
_success_payments_mapper_select_clause.append("payment_date AS payment_date")
_success_payments_mapper_select_clause.append("date_format(current_timestamp(), \"yyyy-MM-dd\'T\'HH:mm:ss.SSS\") AS created_at")
_success_payments_mapper_select_clause.append("date_format(current_timestamp(), \"yyyy-MM-dd\'T\'HH:mm:ss.SSS\") AS updated_at")
_success_payments_mapper_select_clause.append("CAST(amount AS DECIMAL(10,2)) AS amount")
success_payments_mapper_df=spark.sql(("SELECT " + ', '.join(_success_payments_mapper_select_clause) + " FROM success_payment_filter_df").replace("{job_id}",f"'{job_id}'"))
success_payments_mapper_df.createOrReplaceTempView("success_payments_mapper_df")


@@ -230,6 +230,37 @@ def success_payment_filter(payment_api_df, spark):
    print(payment_api_df.columns)
    success_payment_filter_df = spark.sql("select * from payment_api_df where response_body.success=\'true\'")
    success_payment_filter_df.createOrReplaceTempView('success_payment_filter_df')
    return (success_payment_filter_df,)


@app.cell
def success_payments_mapper(job_id, spark, success_payment_filter_df):
    _success_payments_mapper_select_clause=success_payment_filter_df.columns if False else []
    _success_payments_mapper_select_clause.append("uuid() AS id")
    _success_payments_mapper_select_clause.append("account_id AS account_id")
    _success_payments_mapper_select_clause.append("currency AS currency")
    _success_payments_mapper_select_clause.append("payment_id AS payment_id")
    _success_payments_mapper_select_clause.append("gateway AS gateway")
    _success_payments_mapper_select_clause.append("payment_method AS payment_method")
    _success_payments_mapper_select_clause.append("payment_date AS payment_date")
    _success_payments_mapper_select_clause.append("date_format(current_timestamp(), \"yyyy-MM-dd\'T\'HH:mm:ss.SSS\") AS created_at")
    _success_payments_mapper_select_clause.append("date_format(current_timestamp(), \"yyyy-MM-dd\'T\'HH:mm:ss.SSS\") AS updated_at")
    _success_payments_mapper_select_clause.append("CAST(amount AS DECIMAL(10,2)) AS amount")
    success_payments_mapper_df=spark.sql(("SELECT " + ', '.join(_success_payments_mapper_select_clause) + " FROM success_payment_filter_df").replace("{job_id}",f"'{job_id}'"))
    success_payments_mapper_df.createOrReplaceTempView("success_payments_mapper_df")
    return

File diff suppressed because one or more lines are too long