import logging

# Emit an INFO-level record via the module-level convenience helper,
# which delegates to the root logger.
msg = "this is an information"
logging.info(msg)
import logging

# Equivalent to logging.info(...): logging.log() takes the severity level
# as an explicit first argument.
level = logging.INFO
logging.log(level, "this is an information")
import logging

# getLogger() with no argument returns the root logger object,
# so this is equivalent to the module-level logging.info() helper.
logger = logging.getLogger()
logger.info("this is an information")
import logging

# A named logger: created on first request and registered in the logging
# hierarchy as a child of the root logger.
logger = logging.getLogger('mycustomlogger')
logger.info("this is an information")
import logging

# Conventional idiom: name the logger after the importing module so log
# records show where they originated.
logger = logging.getLogger(__name__)
logger.info("this is an information")
import scrapy


class LogSpider(scrapy.Spider):
    """Spider demonstrating logging through the built-in ``self.logger``."""

    name = 'logspider'
    start_urls = ['http://dmoz.com']

    def parse(self, response):
        # ``self.logger`` is a logger named after the spider, provided by
        # scrapy.Spider; %-style lazy arguments defer string formatting.
        self.logger.info('Parse function called on %s', response.url)
import logging

import scrapy

# Module-level logger with an explicit custom name, shared by the spider below.
logger = logging.getLogger('customizedlogger')


class LogSpider(scrapy.Spider):
    """Spider demonstrating logging through a custom module-level logger."""

    name = 'logspider'
    start_urls = ['http://dmoz.com']

    def parse(self, response):
        # Uses the module-level logger instead of the spider's own self.logger.
        logger.info('Parse function called on %s', response.url)
命令和说明 |
--logfile FILE
覆盖
LOG_FILE
|
--loglevel/-L LEVEL
覆盖
LOG_LEVEL
|
--nolog
设置
LOG_ENABLED 为 False
|
scrapy.utils.log.configure_logging(settings=None, install_root_handler=True)
参数和说明 |
settings (dict, None)
它为根记录器创建和配置处理程序。默认情况下,它是
None。
|
install_root_handler (bool)
它指定安装根日志处理程序。默认情况下,它是
True。
|
import logging

from scrapy.utils.log import configure_logging

# Skip Scrapy's default root-handler installation so that basicConfig()
# below fully controls where and how log records are written.
# Fix: the original passed the invalid lowercase literal ``false``
# (a NameError in Python) instead of ``False``.
configure_logging(install_root_handler=False)

# Route all records of level INFO and above to 'logging.txt'.
# Fix: the original format used '%(your_message)s', which is not a valid
# LogRecord attribute and would make every log call raise a formatting
# error; the correct attribute is '%(message)s'.
logging.basicConfig(
    filename='logging.txt',
    format='%(levelname)s: %(message)s',
    level=logging.INFO,
)