在 Windows 任务计划程序中运行脚本时不触发电子邮件

问题描述 投票:0回答:1

我有一个 python 脚本,它运行一个 SQL 文件并将输出生成到某个位置。但是,它还意味着发送一封电子邮件,指示每个文件的行数。当我在 Visual Studio Code 中运行脚本时,此电子邮件可以正常触发。但是,当我尝试自动化脚本并通过任务计划程序运行它时,它不会触发电子邮件。任务计划程序中的其他所有内容都运行良好,并且输出正确,但只是电子邮件未触发。任务计划程序正在以最高权限运行。任何帮助将不胜感激!

SQLProcessor is called and executed in this sql_processor.py script 

Control variables are defined in the controls.py file

If email, then outlook should be open on the machine to send the email

The user running the script should have permission to read and write to 
the data sources required

Oracle connection through temp_propensity (may need to be updated)
"""
import os 
import re
import csv 
import shutil 
import logging
import requests
from Utils import utils as fh 
import pandas as pd 
from datetime import datetime
import win32com.client as win32

import controls as ctrl


# Custom file handler that prepends a header row so the log can be
# ingested by Tableau.
filehandler = fh.FileHandlerWithHeader(
    ctrl.LOG_PATH + ctrl.LOG_FILE,
    "Level\t | Datestamp\t | Message",
    delay=False,
    )
filehandler.setFormatter(
    logging.Formatter("%(levelname)s | %(asctime)s | %(message)s")
    )

# Module logger, keyed on the log file path; all logging in this script
# should go through `logger` (not the root logger) to hit the file handler.
logger = logging.getLogger(ctrl.LOG_PATH + ctrl.LOG_FILE)
logger.setLevel(logging.INFO)
logger.addHandler(filehandler)


class SQLProcessor():
   """Fetch a SQL script from GitHub, execute it against Oracle, and
   export the resulting tables to CSV, optionally emailing a summary.

   All configuration (paths, connection engine, recipients, messages)
   comes from the ``controls`` module (``ctrl``); all logging goes
   through the module-level ``logger``.
   """

   def __init__(self):
      pass

   def _run_script(self):
      """Fetch the .sql script, substitute ':xx' parameters, back it up,
      and execute it.

      The script is downloaded from ``ctrl.SQL_URL``. If
      ``ctrl.REPLACE_TEXT`` is set, ':name' placeholders are substituted
      from ``ctrl.SQL_PARAMS`` before execution. Copies of the SQL and
      of controls.py are written to the backup paths for auditing.

      :raises Exception: re-raises the request error if the GitHub
         fetch fails (there is nothing to execute without it).
      """
      # Initial log of run context (date, job, machine).
      logger.info(f"Running Project in: {os.getcwd()}")
      logger.info(f"Run Date: {ctrl.RUN_ID}")
      logger.info(f"Running Job: {ctrl.SUBJECT}")
      logger.info(f"Running on PC: {os.getenv('COMPUTERNAME')}")

      # Fetch the SQL source from GitHub.
      try:
         resp = requests.get(
            url=ctrl.SQL_URL,
            proxies=ctrl.PROXIES,
            # NOTE(review): verify=False disables TLS certificate
            # checking — confirm this is intended for this proxy setup.
            verify=False,
            auth=ctrl.AUTH,
            )
         logger.info(f"Fetching code from Github: {str(resp)}")
      except Exception as msg:
         logger.error(msg.args[0])
         # BUG FIX: previously execution fell through and crashed with
         # NameError on 'resp' below; re-raise the real error instead.
         raise

      # Substitute ':xx' parameters if required.
      if ctrl.REPLACE_TEXT:
         # BUG FIX: original passed the undefined name 'file' here,
         # raising NameError whenever REPLACE_TEXT was enabled.
         sqlfile = self.replace_text(resp.text, ctrl.SQL_PARAMS)
      else:
         sqlfile = resp.text

      # Write the SQL file and a datestamped backup copy.
      with open(f"{ctrl.SQL_BKP_PATH}\\{ctrl.SQL_FILE}", "w") as file:
         file.write(sqlfile)
      backup_name = f"{ctrl.SQL_FILE[:-4]}_{ctrl.RUN_ID}.sql"
      with open(f"{ctrl.SQL_BKP_PATH}\\{backup_name}", "w") as file:
         file.write(sqlfile)

      # BUG FIX: these two lines used logging.info (root logger), which
      # bypasses the custom Tableau file handler; use the module logger.
      # Also log the backup filename actually written (with RUN_ID).
      logger.info(f"SQL file written to .\\{ctrl.SQL_FILE}")
      logger.info(f"SQL backup file written to {ctrl.SQL_BKP_PATH}\\{backup_name}")

      # Re-read the file as a single buffer and execute it.
      with open(f"{ctrl.SQL_BKP_PATH}\\{ctrl.SQL_FILE}", "r") as fd:
         sqlfile = fd.read()

      # Loops through queries (separated by ';') and executes them.
      self._execute_sql(sqlfile)
      logger.info("Finished executing SQL file")

      # Best-effort: snapshot controls.py alongside the run outputs.
      try:
         ctrl_bkp_path = (
            f"{ctrl.CONTROL_BKP_PATH}\\controls_"
            f"{ctrl.SQL_FILE[:-4]}_{ctrl.RUN_ID}.txt"
            )
         with open("controls.py", "r") as fd:
            controls = fd.read()
         with open(ctrl_bkp_path, "w") as output:
            output.write(controls)
         logger.info(f"Controls file copied to: {ctrl_bkp_path}")
      except Exception as msg:
         logger.error(msg.args[0])

   def _execute_sql(self, sqlfile: str):
      """Execute a multi-statement .sql script (statements separated by ';').

      Errors on individual statements are logged and skipped — e.g.
      **DROP TABLE** commands for tables that do not yet exist.

      :param sqlfile: full text of the SQL script to run
      """
      sqlcommands = sqlfile.split(';')
      c = ctrl.ENGINE.connect()
      logger.info(f"Oracle Params: {ctrl.user}@{ctrl.sid}")
      try:
         for ii, cmd in enumerate(sqlcommands, start=1):
            logger.info(f"Running Query {ii} of {len(sqlcommands)}")
            try:
               cmd = cmd.replace('\n', ' ')
               c.execute(cmd)
               c.execute('COMMIT')
               logger.info(f"Query Run Successfully: {cmd}")
            except Exception as msg:
               logger.error(f"{msg.args[0]}: {cmd}")
      finally:
         # BUG FIX: close the connection even if an unexpected error
         # escapes the per-statement handler.
         c.close()

   def output_csv(self, email=ctrl.SEND_EMAIL):
      """Read each final table and write it to CSV, optionally emailing
      a success or no-data summary per table.

      :param email: when truthy, send a status email for each table
      """
      con = ctrl.ENGINE.connect()
      try:
         for table, output_path in ctrl.OUTPUT_TABLES.items():
            # Loop through tables and output to respective output paths.
            data = pd.read_sql(f"select * from {table}", con=con)
            rows = data.shape[0]
            self.row_count = rows

            if rows > 0:
               logger.info(f"Output {table} row count: {rows}")
            else:
               logger.warning(f"Output {table} row count: {rows} **NO DATA RETURNED**")
               if email:
                  self.send_mail(
                     mail=ctrl.MESSAGE_NO_DATA.format(
                        row_count=self.row_count,
                        log_path=ctrl.LOG_PATH,
                        log_file=ctrl.LOG_FILE,
                        )
                     )
            try:
               # to_csv overwrites an existing file of the same name.
               out_file = f"{output_path}{table}_{ctrl.RUN_ID}.csv"
               data.to_csv(out_file,
                           index=False,
                           quoting=csv.QUOTE_ALL,
                           encoding='utf-8-sig')
               # BUG FIX: log the file actually written (the original
               # omitted the RUN_ID suffix) and fix the 'sucessful' typo.
               logger.info(f"File output successful: {out_file}")
               if email:
                  self.send_mail(mail=ctrl.MESSAGE_SUCCESS.format(
                     row_count=self.row_count,
                     output_path=output_path,
                     output_table=table,
                     ))
            except Exception as msg:
               logger.warning(msg)
      finally:
         # BUG FIX: ensure the connection is released on any error.
         con.close()

   @staticmethod
   def backup_file(file, backup_file):
      """Copy *file* to *backup_file*, logging success or failure.

      :param file: path of the file to back up
      :param backup_file: destination path for the copy
      """
      try:
         shutil.copy(file, backup_file)
         logger.info(f"{file} backed up with datestamp: {ctrl.RUN_ID}")
      except Exception as msg:
         # BUG FIX: original string lacked the f-prefix, so the literal
         # text '{file}'/'{msg}' was logged instead of the values.
         logger.error(f"Backing up file {file} failed: {msg}")

   @staticmethod
   def replace_text(string: str, dic: dict):
      """Replace ':key' placeholders in *string* with values from *dic*.

      **Called internally by _run_script**

      :param string: text containing ':name' style placeholders
      :param dic: mapping of placeholder name -> replacement value
      :return: the string with all matched placeholders substituted
      """
      for i in dic.keys():
         if f':{i}' in string:
            string = re.sub(f':{i}', str(dic[i]), string)
            # BUG FIX: was logging.info (root logger), bypassing the
            # custom file handler; use the module logger.
            logger.info(
               f"Replacing SQL Params: {i} replace with {str(dic[i])}"
               )
      return string

   @staticmethod
   def mailer(message):
      """Send *message* through Outlook via COM automation.

      NOTE(review): win32com Outlook dispatch requires a configured
      Outlook profile in an interactive user session — under Task
      Scheduler the task must run as a logged-on user whose account has
      Outlook set up, or Dispatch/Send will fail silently here and be
      swallowed by send_mail's except clause. Confirm the scheduled
      task's account.
      """
      outlook = win32.Dispatch('outlook.application')
      mail = outlook.CreateItem(0)
      mail.To = ctrl.RECIPIENTS
      mail.cc = ctrl.CC
      mail.Subject = ctrl.SUBJECT
      # Pass message in send_mail wrapper
      mail.Body = message
      mail.Send()

   def send_mail(self, mail):
      """Wrapper for mailer, adding logging of success/failure.

      :param mail: body text of the email to send
      """
      try:
         self.mailer(message=mail)
         logger.info("Email sent to stakeholders")
      except Exception as msg:
         logger.warning(msg)

   def main(self):
      """Wrapper method: run the SQL, optionally export CSVs, then back
      up the log file."""
      self._run_script()

      if ctrl.OUTPUT_CSV:
         self.output_csv(email=ctrl.SEND_EMAIL)

      # Back up the log file at the end of the run.
      self.backup_file(
         f"{ctrl.LOG_PATH}{ctrl.LOG_FILE}",
         f"{ctrl.LOG_PATH}{ctrl.LOG_FILE[:-4]}_{ctrl.RUN_ID}.log"
         )


if __name__ == "__main__":
   # main() returns None; run it for its side effects. (The original
   # bound the None result to an unused 'extract' variable.)
   SQLProcessor().main()
python python-3.x scheduled-tasks scheduler
1个回答
0
投票

我的猜测是该任务在未配置 Outlook 客户端的帐户下运行。

任务在什么用户帐户下运行?以该帐户登录并启动 Outlook。这可能会告诉您问题是什么。

© www.soinside.com 2019 - 2024. All rights reserved.