Workflow saved
@@ -299,12 +299,6 @@ failed_payments_mapper_df.createOrReplaceTempView("failed_payments_mapper_df")
 
 # %%
-print(failed_payments_mapper_df.columns)
-
-final_failed_payments_df = spark.sql("select * from failed_payments_mapper_df where payment_date >= COALESCE((SELECT MAX(DATE(payment_date)) FROM dremio.failedpaymentmetrics), (SELECT MIN(payment_date) FROM failed_payments_mapper_df))")
-
-final_failed_payments_df.createOrReplaceTempView('final_failed_payments_df')
-# %%
 
 print(final_failed_payments_df.columns)
 filter__13_df = spark.sql("select * from final_failed_payments_df where gateway = \'CCS\'")
 filter__13_df.createOrReplaceTempView('filter__13_df')
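
Both the removed cell above and its counterpart added later in this diff rely on the same incremental-load predicate: keep rows whose payment_date is on or after the newest date already written to dremio.failedpaymentmetrics, falling back to the source's earliest payment_date when the target table is empty. A minimal sketch of that pattern, assuming an active SparkSession named spark; filter_new_rows, source_view, and target_table are illustrative names, not from this commit:

    def filter_new_rows(spark, source_view, target_table, date_col="payment_date"):
        # Watermark filter: MAX(date) from the target resumes where the last
        # run stopped; COALESCE falls back to the source MIN(date) on the
        # first run, when the target is empty and MAX(...) returns NULL.
        query = f"""
            SELECT *
            FROM {source_view}
            WHERE {date_col} >= COALESCE(
                (SELECT MAX(DATE({date_col})) FROM {target_table}),
                (SELECT MIN({date_col}) FROM {source_view})
            )
        """
        return spark.sql(query)

Because the comparison is >=, the most recent day is re-read on every run, so the downstream write has to be idempotent; that is presumably why this pipeline writes through a MERGE (the _merge_query in the next hunk) rather than a plain append.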
@@ -478,3 +472,10 @@ _merge_query = '''
 
 spark.sql(_merge_query)
 
+# %%
+
+print(FailedPaymentsData_df.columns)
+
+LatestFailedPayments_df = spark.sql("select * from FailedPaymentsData_df where payment_date >= COALESCE((SELECT MAX(DATE(payment_date)) FROM dremio.failedpaymentmetrics), (SELECT MIN(payment_date) FROM failed_payments_mapper_df))")
+failed_payments_df.createOrReplaceTempView('failed_payments_df')
+failed_payments_df.persist()
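
One thing worth flagging in the added cell: it selects from FailedPaymentsData_df, but the COALESCE fallback still reads MIN(payment_date) FROM failed_payments_mapper_df, the view built by the other pipeline. That runs as long as both views are registered in the session, but on a first load it bounds the new data by the mapper view's dates rather than by the data actually being selected. If the two were meant to agree, the fallback would presumably reference the same view; a hypothetical consistent version (a suggestion, not what the commit contains):

    # Hypothetical: fallback MIN() taken from the same view being filtered
    # (the committed code reads failed_payments_mapper_df instead).
    LatestFailedPayments_df = spark.sql("""
        SELECT * FROM FailedPaymentsData_df
        WHERE payment_date >= COALESCE(
            (SELECT MAX(DATE(payment_date)) FROM dremio.failedpaymentmetrics),
            (SELECT MIN(payment_date) FROM FailedPaymentsData_df)
        )
    """)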
@@ -354,16 +354,7 @@ def failed_payments_mapper(failed_payments_reader_df, job_id, spark):
 
     failed_payments_mapper_df=spark.sql(("SELECT " + ', '.join(_failed_payments_mapper_select_clause) + " FROM failed_payments_reader_df").replace("{job_id}",f"'{job_id}'"))
     failed_payments_mapper_df.createOrReplaceTempView("failed_payments_mapper_df")
-    return (failed_payments_mapper_df,)
-
-
-@app.cell
-def final_failed_payments(failed_payments_mapper_df, spark):
-    print(failed_payments_mapper_df.columns)
-    final_failed_payments_df = spark.sql("select * from failed_payments_mapper_df where payment_date >= COALESCE((SELECT MAX(DATE(payment_date)) FROM dremio.failedpaymentmetrics), (SELECT MIN(payment_date) FROM failed_payments_mapper_df))")
-    final_failed_payments_df.createOrReplaceTempView('final_failed_payments_df')
-
-    return (final_failed_payments_df,)
+    return
 
 
 @app.cell
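
The return-value change above is the mechanical part of deleting the final_failed_payments cell: in a marimo app file, the names a cell returns are what downstream cells can receive as parameters, so once nothing consumes failed_payments_mapper_df the cell drops to a bare return. A self-contained sketch of that dataflow convention, with toy names rather than anything from this commit:

    import marimo

    app = marimo.App()

    @app.cell
    def producer():
        message = "hello"
        return (message,)  # exported: downstream cells may declare `message` as a parameter

    @app.cell
    def consumer(message):
        print(message)  # runs after producer, receiving its exported value
        return  # bare return: this cell exports nothing

    if __name__ == "__main__":
        app.run()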
@@ -578,5 +569,15 @@ def success_payment_metrics_writer(spark, success_payment_metrics_df):
     return
 
 
+@app.cell
+def failed_payments(FailedPaymentsData_df, failed_payments_df, spark):
+    print(FailedPaymentsData_df.columns)
+    LatestFailedPayments_df = spark.sql("select * from FailedPaymentsData_df where payment_date >= COALESCE((SELECT MAX(DATE(payment_date)) FROM dremio.failedpaymentmetrics), (SELECT MIN(payment_date) FROM failed_payments_mapper_df))")
+    failed_payments_df.createOrReplaceTempView('failed_payments_df')
+    failed_payments_df.persist()
+
+    return
+
+
 if __name__ == "__main__":
     app.run()
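
The added cell ends with failed_payments_df.persist(). In Spark, persist() is lazy: it only marks the DataFrame for caching, and the cache is filled by the first action and reused by later ones, which pays off when several downstream cells query the same data. A standalone sketch with a hypothetical DataFrame (default storage level, which is memory-and-disk for DataFrames):

    from pyspark.sql import SparkSession

    spark = SparkSession.builder.appName("persist-sketch").getOrCreate()

    df = spark.range(1_000_000)  # hypothetical stand-in for failed_payments_df
    df.persist()  # marks df for caching; nothing is computed yet
    print(df.count())  # first action materializes the cache
    print(df.filter("id % 2 = 0").count())  # second action reuses cached partitions
    df.unpersist()  # release the cache when downstream reads are done
    spark.stop()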
File diff suppressed because one or more lines are too long