我在 pyspark sql 中遇到以下错误。
org.apache.spark.sql.catalyst.parser.ParseException: mismatched input 'from' expecting (line 9, pos 4)
-- Original Hive query, reproduced as-is. It parses in Hive but fails in
-- Spark SQL with "mismatched input 'from'" at the marked position.
-- NOTE(review): the likely cause is the WITH clause nested inside the joined
-- subquery below (line "with abc as(") — Spark versions before 3.0 do not
-- support CTEs inside subqueries; confirm the cluster's Spark version.
select a.appl_sta,
a.appl_pcnvendorcode,
a.appl_pcn,
b.appl_corp_strct_in,
a.base_dcsn_dt,
a.pcn_curr_cd,
a.pcn_mo,
case when trim(upper(b.appl_corp_strct_in)) = 'S' then 1 else 0 end as sol_ind
from lod1_aml_p2_413_gp_base a
----^^^
left join
(
-- NOTE(review): this nested WITH is what the parser rejects; hoisting it to
-- the top level (or inlining it as a plain derived table) fixes the error.
with abc as(
select appl_pcn,appl_corp_strct_in,Row_number () OVER(PARTITION BY appl_pcn ORDER BY gcap_extr_dt_tm DESC) AS Recrd_rank1
from cstonedb3.gcap_applcnt_data where appl_no =0
-- Restricts pcn_mo to the yyyyMM window [reference date - 2 months, reference date].
and pcn_mo between string(cast(date_format(add_months('2023-09-26', -2),'yyyyMM') as int)) and string(date_format('2023-09-26','yyyyMM'))
-- Recrd_rank1 = 1 keeps only the latest extract per appl_pcn.
)select * from abc where Recrd_rank1 =1
)b
on a.appl_pcn = b.appl_pcn
有人可以帮忙看看上面的查询出了什么问题吗?该查询在 Hive 中运行良好,但在 Spark SQL 中抛出错误
尝试下面的 SQL 查询。
-- Spark-SQL-compatible rewrite: all CTEs are hoisted to the top level, since
-- Spark < 3.0 rejects a WITH clause nested inside a subquery.
WITH
-- Rank applicant snapshots per PCN so only the latest extract survives.
gcap_applcnt_data AS (
    SELECT
        appl_pcn,
        appl_corp_strct_in,
        ROW_NUMBER() OVER (
            PARTITION BY appl_pcn
            ORDER BY gcap_extr_dt_tm DESC
        ) AS rnk,
        appl_no,
        CAST(pcn_mo AS INT) AS pcn_mo,
        -- yyyyMM window bounds derived from the reference date. Both are cast
        -- to INT so the BETWEEN below compares numbers, not strings (the
        -- original left date_to as a string, mixing types in the comparison).
        CAST(
            DATE_FORMAT(ADD_MONTHS('2023-09-26', -2), 'yyyyMM') AS INT
        ) AS date_from,
        CAST(DATE_FORMAT('2023-09-26', 'yyyyMM') AS INT) AS date_to,
        CASE
            WHEN TRIM(UPPER(appl_corp_strct_in)) = 'S' THEN 1
            ELSE 0
        END AS sol_ind
    FROM cstonedb3.gcap_applcnt_data
),
-- Left side of the join; appl_sta restored (the original query selects it).
gp_base AS (
    SELECT
        appl_sta,
        appl_pcnvendorcode,
        appl_pcn,
        base_dcsn_dt,
        pcn_curr_cd,
        pcn_mo
    FROM lod1_aml_p2_413_gp_base
),
-- Latest in-window applicant row per PCN.
gcap_applcnt_data_cte AS (
    SELECT
        appl_pcn,
        appl_corp_strct_in,
        sol_ind
    FROM gcap_applcnt_data
    WHERE
        appl_no = 0
        -- FIX: the CTE defines date_from / date_to; the previous version
        -- referenced non-existent columns from_date / to_date.
        AND pcn_mo BETWEEN date_from AND date_to
        AND rnk = 1
)
SELECT
    l.appl_sta,
    l.appl_pcnvendorcode,
    l.appl_pcn,
    r.appl_corp_strct_in,
    l.base_dcsn_dt,
    l.pcn_curr_cd,
    l.pcn_mo,
    -- COALESCE preserves the original query's semantics: rows with no join
    -- match get sol_ind = 0 (its CASE ... ELSE 0 ran after the join), not NULL.
    COALESCE(r.sol_ind, 0) AS sol_ind
FROM gp_base l
LEFT JOIN gcap_applcnt_data_cte r
    ON l.appl_pcn = r.appl_pcn