parzival-au20 / Node-JS


test #1

Open parzival-au20 opened 4 months ago

parzival-au20 commented 4 months ago

import got from 'got';
import {HttpProxyAgent} from 'hpagent';

// Send 30 requests through the Apify residential proxy and print the exit IP each time.
// NB: got only routes an https target through the proxy if an https agent is also set
// (see the next comment); only the http agent is configured here.
for (let i = 0; i < 30; i++) {
    const response = await got('https://www.youtube.com/', {
        agent: {
            http: new HttpProxyAgent({
                // The proxy URL must be a quoted string.
                proxy: 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000',
            }),
        },
    });

    console.log(response.ip);
}

simsektepede380606 commented 4 months ago

import got from 'got';
import {HttpProxyAgent, HttpsProxyAgent} from 'hpagent';
import fs from 'fs';

const certPath = '../youtube.crt';
const cert = fs.readFileSync(certPath);

for (let i = 0; i < 10; i++) {
    const response = await got('https://www.youtube.com/', {
        agent: {
            http: new HttpProxyAgent({
                proxy: 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000',
            }),
            // An https target needs an https.Agent, i.e. hpagent's HttpsProxyAgent.
            https: new HttpsProxyAgent({
                proxy: 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000',
            }),
        },
        https: {
            certificate: cert,
        },
    });

    console.log(response.ip);
}

simsektepede380606 commented 4 months ago

node:internal/process/esm_loader:97
  internalBinding('errors').triggerUncaughtException(
  ^

RequestError: unable to verify the first certificate
    at ClientRequest.<anonymous> (file:///c:/Users/s23283/Desktop/proxy/node_modules/got/dist/source/core/index.js:670:107)
    at Object.onceWrapper (node:events:628:26)
    at ClientRequest.emit (node:events:525:35)
    at TLSSocket.socketErrorListener (node:_http_client:502:9)
    at TLSSocket.emit (node:events:513:28)
    at emitErrorNT (node:internal/streams/destroy:151:8)
    at emitErrorCloseNT (node:internal/streams/destroy:116:3)
    at process.processTicksAndRejections (node:internal/process/task_queues:82:21)
    at TLSSocket.onConnectSecure (node:_tls_wrap:1540:34)
    ... 2 lines matching cause stack trace ...
    at ssl.onhandshakedone (node:_tls_wrap:743:12) {
  input: undefined,
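This error means Node could not verify the server's certificate chain, which is typical when a corporate proxy re-signs TLS traffic. In got, a trusted CA is supplied via the https.certificateAuthority option; https.certificate is the client-certificate option, so the snippet above never actually trusts the local .crt file. A minimal sketch of the change, assuming the same proxy URL and CA file as above:

import got from 'got';
import {HttpsProxyAgent} from 'hpagent';
import fs from 'fs';

// Read the local CA certificate that should be trusted during verification.
const cert = fs.readFileSync('../youtube.crt');

const response = await got('https://www.youtube.com/', {
    agent: {
        https: new HttpsProxyAgent({
            proxy: 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000',
        }),
    },
    https: {
        certificateAuthority: cert,  // not `certificate`, which is a client cert
    },
});
console.log(response.ip);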

parzival-au20 commented 4 months ago

import asyncio

import requests
from apify import Actor
from bs4 import BeautifulSoup


async def main():
    proxy_configuration = await Actor.create_proxy_configuration()
    proxy_url = await proxy_configuration.new_url()

    # Proxy server URL (hardcoded, overriding the SDK-generated URL above)
    proxy_url = 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000'

    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/58.0.3029.110 Safari/537.3"
    }

    # Proxy settings
    proxies = {
        'http': proxy_url,
        'https': proxy_url,
    }

    # Target URL
    url = 'https://www.youtube.com/'

    # Request-sending function
    def fetch_with_proxy(url, proxies):
        for i in range(5):
            try:
                response = requests.get(url, headers=headers, proxies=proxies,
                                        verify="./TUSAS.crt", timeout=5)
                print(f'Response {i + 1}:', response.status_code)
            except requests.RequestException as e:
                print(f'Error on request {i + 1}:', e)

    # Send the requests
    fetch_with_proxy(url, proxies)


asyncio.run(main())

parzival-au20 commented 4 months ago

Error on request 1: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
Error on request 2: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
Error on request 3: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
Error on request 4: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
Error on request 5: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
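The 403 is raised by the proxy's CONNECT tunnel itself, before the request ever reaches YouTube, so it points at the proxy credentials or account plan rather than the target site. A minimal isolation test, assuming the same proxy URL as above (api.ipify.org is just a lightweight echo service used here for illustration):

import requests

# Same Apify proxy URL as above (assumption: unchanged credentials).
proxy_url = ('http://country-TR,groups-RESIDENTIAL:'
             'apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000')
proxies = {'http': proxy_url, 'https': proxy_url}

# If even this tiny request fails with 'Tunnel connection failed: 403',
# the proxy is rejecting the credentials or the account lacks access.
response = requests.get('https://api.ipify.org?format=json', proxies=proxies, timeout=10)
print(response.status_code, response.text)  # should show the proxy's exit IP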

parzival-au20 commented 4 months ago

curl 'https://sandbox.oxylabs.io/products/' -U 'Sefademirci:**' -x 'unblock.oxylabs.io:60000' -H 'x-oxylabs-geo-location: United States' -k
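For comparison, a rough requests equivalent of that curl call; this is a sketch that keeps the elided '**' password placeholder, and verify=False mirrors curl's -k flag:

import requests

# Rough Python counterpart of the curl command above; '**' is the elided password.
proxies = {
    'http': 'http://Sefademirci:**@unblock.oxylabs.io:60000',
    'https': 'https://Sefademirci:**@unblock.oxylabs.io:60000',
}
response = requests.get(
    'https://sandbox.oxylabs.io/products/',
    proxies=proxies,
    headers={'x-oxylabs-geo-location': 'United States'},
    verify=False,  # curl's -k: skip TLS certificate verification
)
print(response.status_code)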

parzival-au20 commented 4 months ago

python test.py
Actor failed with an exception
Traceback (most recent call last):
  File "/workspaces/Node-JS/test.py", line 6, in main
    proxy_configuration = await Actor.create_proxy_configuration(password="apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC")
  File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/actor.py", line 1309, in create_proxy_configuration
    return await cls._get_default_instance().create_proxy_configuration(
  File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/actor.py", line 1349, in _create_proxy_configuration_internal
    await proxy_configuration.initialize()
  File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/proxy_configuration.py", line 206, in initialize
    await self._check_access()
  File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/proxy_configuration.py", line 340, in _check_access
    raise ConnectionError(status['connectionError'])
ConnectionError: The "Proxy external access" feature is not enabled for your account. Please upgrade your plan or contact support@apify.com
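Since this plan blocks using Apify proxy from outside the platform, one workaround is to hand the SDK's ProxyConfiguration a self-supplied proxy list instead; a sketch assuming the installed apify SDK version accepts a proxy_urls argument (the example.com proxies are hypothetical placeholders):

import asyncio

from apify import Actor


async def main():
    # Rotate over self-supplied proxies instead of Apify's pool.
    # Assumption: this apify SDK version supports the `proxy_urls` parameter.
    proxy_configuration = await Actor.create_proxy_configuration(
        proxy_urls=[
            'http://proxy1.example.com:8000',  # hypothetical proxies
            'http://proxy2.example.com:8000',
        ],
    )
    proxy_url = await proxy_configuration.new_url()
    print(proxy_url)  # new_url() cycles through the supplied URLs


asyncio.run(main())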

parzival-au20 commented 4 months ago

import requests
from bs4 import BeautifulSoup
from itertools import cycle

# Proxy list
proxies = [
    'http://proxy1.example.com:port',
    'http://proxy2.example.com:port',
    # Other proxy addresses...
]

proxy_pool = cycle(proxies)

# Start URL
start_url = 'https://example.com'

def crawl_with_proxy(url, proxy):
    try:
        # Make the HTTP request through the proxy
        response = requests.get(url, proxies={'http': proxy, 'https': proxy})
        response.raise_for_status()  # Raise an error if the request fails

        # Parse the HTML content with BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')

        # Collect all the links on the page
        links = [a['href'] for a in soup.find_all('a', href=True)]
        print(f'Crawled URL: {url}')
        print('Links:', links)
    except requests.RequestException as e:
        print(f'Error while crawling URL: {url}', e)

# Crawl loop
for i in range(10):  # looping only 10 times as an example
    proxy = next(proxy_pool)
    crawl_with_proxy(start_url, proxy)