AlanMartines / monitoramento-sefaz-zabbix-grafana

Monitoring the status of Sefaz services via Zabbix and Grafana.
MIT License

Problems with Cloudscraper #1

Closed: VQFCOSTA-Infortech closed this issue 2 weeks ago

VQFCOSTA-Infortech commented 3 weeks ago

Problem:

root@CT[/usr/lib/zabbix/externalscripts]# ./sefaznfe.py.bkp http://www.nfe.fazenda.gov.br/portal/disponibilidade.aspx MG SERVICO
Traceback (most recent call last):
  File "./sefaznfe.py.bkp", line 130, in <module>
    main(url, estado, status)
  File "./sefaznfe.py.bkp", line 74, in main
    response = request(url)
  File "./sefaznfe.py.bkp", line 37, in request
    scraper = cloudscraper.create_scraper()
  File "/usr/local/lib/python3.6/dist-packages/cloudscraper/__init__.py", line 319, in create_scraper
    scraper = cls(**kwargs)
  File "/usr/local/lib/python3.6/dist-packages/cloudscraper/__init__.py", line 173, in __init__
    ssl_context=self.ssl_context
  File "/usr/local/lib/python3.6/dist-packages/cloudscraper/__init__.py", line 77, in __init__
    self.ssl_context.orig_wrap_socket = self.ssl_context.wrap_socket
AttributeError: 'SSLContext' object has no attribute 'orig_wrap_socket'
root@CT[/usr/lib/zabbix/externalscripts]#
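
The traceback shows the failure happening inside cloudscraper's own SSL-context patching, so the problem sits in the cloudscraper/Python 3.6 combination on this container rather than in sefaznfe.py itself. As a quick check (a diagnostic suggestion, not part of the original report), the installed release can be inspected with:

root@CT[/usr/lib/zabbix/externalscripts]# python3 -m pip show cloudscraper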

Solution:

Replace cloudscraper with plain requests plus BeautifulSoup (the rewritten script below needs only these two packages):

pip install requests beautifulsoup4

sefaznfe.py

#!/usr/bin/env python3

import sys
import random
from bs4 import BeautifulSoup
import requests

user_agent_list = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:89.0) Gecko/20100101 Firefox/89.0',
    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Mobile/15E148 Safari/604.1',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36 Edg/91.0.864.64',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36 OPR/77.0.4054.172',
]

def request(url):
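    # Fetch the page with browser-like headers and a randomly chosen User-Agent,
    # using plain requests instead of cloudscraper.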
    headers = {
        'User-Agent': random.choice(user_agent_list),
        'Accept-Language': 'en-US,en;q=0.9',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
        'DNT': '1',
        'Upgrade-Insecure-Requests': '1'
    }
    response = requests.get(url, headers=headers, timeout=10)  # timeout so the Zabbix external check cannot hang
    return response

def consultar_servico(status):
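    # Map the status-ball image path from the availability page to the numeric code printed for Zabbix.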
    if status == "imagens/bola_verde_P.png":
        return 1  # DISPONIVEL
    elif status == "imagens/bola_amarela_P.png":
        return 2  # INDISPONIVEL
    elif status == "imagens/bola_vermelho_P.png":
        return 0  # OFFLINE
    elif status == "-" or status == "":
        return 5  # SEM DADOS
    else:
        return 5  # SEM DADOS

def obter_status(soup, estado, posicao):
    # Find the table row for the given state and return the status image src
    # (or the plain cell text, e.g. the average response time) at that column.
    row = soup.find('td', string=estado)
    if row:
        cells = row.find_parent('tr').find_all('td')
        if posicao < len(cells):
            img = cells[posicao].find('img')
            if img:
                return img['src']
            else:
                return cells[posicao].text.strip()
    return None

def main(url, estado, status):
    try:
        response = request(url)
    except requests.RequestException:
        print(5)  # SEM DADOS: network error or timeout
        sys.exit()

    if response.status_code != 200:
        print(5)
        sys.exit()

    try:
        soup = BeautifulSoup(response.text, 'html.parser')
    except Exception as err:
        # Keep stdout numeric for Zabbix; report details on stderr only.
        print("Failed to parse the page:", err, file=sys.stderr)
        print(5)  # SEM DADOS
        sys.exit()

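    # Column index of each monitored service within the state's table row
    # (index 0 is the state cell itself).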
    status_map = {
        "AUTORIZACAO": 1,
        "RETORNO.AUT": 2,
        "INUTILIZACAO": 3,
        "CONSULTA.PROTOCOLO": 4,
        "SERVICO": 5,
        "TEMPO.MED": 6,
        "CONSULTA.CADASTRO": 7,
        "RECEPCAO.EVENTO": 8
    }

    posicao = status_map.get(status)
    if posicao is None:
        print(5)  # SEM DADOS
        return

    status_img = obter_status(soup, estado, posicao)
    if status_img:
        if status == "TEMPO.MED":
            try:
                tempo_medio = int(status_img)
                if tempo_medio < 200:
                    print(1)  # DISPONIVEL
                elif 200 <= tempo_medio < 1000:
                    print(2)  # INTERMITENTE
                else:
                    print(0)  # CRITICO
            except ValueError:
                print(5)  # SEM DADOS
        else:
            print(consultar_servico(status_img))
    else:
        print(5)  # SEM DADOS

if __name__ == "__main__":
    if len(sys.argv) != 4:
        print(5)
        sys.exit()

    url = sys.argv[1]
    estado = sys.argv[2]
    status = sys.argv[3]
    main(url, estado, status)

FULL CORRECTED SCRIPT, JUST COPY IT
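
Example run with the corrected script, using the same call as in the original report (the 1 shown is only an illustrative result for an available service):

root@CT[/usr/lib/zabbix/externalscripts]# ./sefaznfe.py http://www.nfe.fazenda.gov.br/portal/disponibilidade.aspx MG SERVICO
1

Assuming the script stays under /usr/lib/zabbix/externalscripts and is consumed as a Zabbix external check, a hypothetical item key would look like this (the exact key depends on the template in this repository):

sefaznfe.py["http://www.nfe.fazenda.gov.br/portal/disponibilidade.aspx","MG","SERVICO"]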

AlanMartines commented 2 weeks ago

Good morning,

Script updated, thank you.