Status: Open — parzival-au20 opened this issue 4 months ago
import got from 'got'; import {HttpProxyAgent} from "hpagent"; import fs from "fs";
const certPath = "../youtube.crt"
const cert = fs.readFileSync(certPath);
for(let i = 0;i<10;i++){
const response = await got("https://www.youtube.com/", {
agent:{
http : new HttpProxyAgent({
proxy : http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000
,
}),
https : new HttpProxyAgent({
proxy : http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000
,
}),
},
https:{
certificate : cert
}
});
console.log(response.ip);
}
node:internal/process/esm_loader:97 internalBinding('errors').triggerUncaughtException( ^
RequestError: unable to verify the first certificate
at ClientRequest.
import requests
import asyncio
from apify import Actor
from bs4 import BeautifulSoup

# BUG FIX: the original `proxy_configuration = await Actor.create_proxy_configuration()`
# at module level is a SyntaxError (`await` outside an async function), and its
# result was immediately overwritten by the hardcoded URL below, so it is removed.
# NOTE(review): hardcoded proxy credential — move to an environment variable and
# rotate this exposed Apify proxy password.
proxy_url = 'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000'

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                  "Chrome/58.0.3029.110 Safari/537.3"
}
proxies = {
    'http': proxy_url,
    'https': proxy_url,
}
url = 'https://www.youtube.com/'


def fetch_with_proxy(url, proxies):
    """Request `url` through `proxies` up to 5 times, printing each status code.

    Uses the module-level `headers`, verifies TLS against the local CA bundle
    "./TUSAS.crt" (presumably a corporate interception CA — confirm the file
    exists relative to the working directory), and prints any request error
    instead of raising.
    """
    for i in range(5):
        try:
            response = requests.get(url, headers=headers, proxies=proxies,
                                    verify="./TUSAS.crt", timeout=5)
            print(f'Response {i + 1}:', response.status_code)
        except requests.RequestException as e:
            print(f'Error on request {i + 1}:', e)


fetch_with_proxy(url, proxies)
Error on request 1: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden'))) Error on request 2: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden'))) Error on request 3: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden'))) Error on request 4: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden'))) Error on request 5: HTTPSConnectionPool(host='www.youtube.com', port=443): Max retries exceeded with url: / (Caused by ProxyError('Unable to connect to proxy', OSError('Tunnel connection failed: 403 Forbidden')))
curl 'https://sandbox.oxylabs.io/products/' -U 'Sefademirci:**' -x 'unblock.oxylabs.io:60000' -H 'x-oxylabs-geo-location: United States' -k
python test.py Actor failed with an exception Traceback (most recent call last): File "/workspaces/Node-JS/test.py", line 6, in main proxy_configuration = await Actor.create_proxy_configuration(password="apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC") File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/actor.py", line 1309, in create_proxy_configuration return await cls._get_default_instance().create_proxy_configuration( File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/actor.py", line 1349, in _create_proxy_configuration_internal await proxy_configuration.initialize() File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/proxy_configuration.py", line 206, in initialize await self._check_access() File "/usr/local/python/3.10.13/lib/python3.10/site-packages/apify/proxy_configuration.py", line 340, in _check_access raise ConnectionError(status['connectionError']) ConnectionError: The "Proxy external access" feature is not enabled for your account. Please upgrade your plan or contact support@apify.com
import requests
from bs4 import BeautifulSoup
from itertools import cycle

# Rotating proxy pool (placeholders — replace with real proxy endpoints).
proxies = [
    'http://proxy1.example.com:port',
    'http://proxy2.example.com:port',
]
proxy_pool = cycle(proxies)

start_url = 'https://example.com'


def crawl_with_proxy(url, proxy):
    """Fetch `url` through `proxy`, then print the URL and every hyperlink found.

    Any requests-level failure (connection, HTTP error status, timeout) is
    caught and printed rather than raised.
    """
    try:
        # BUG FIX: added a timeout — without one, a dead proxy makes
        # requests.get() block indefinitely and stalls the whole crawl.
        response = requests.get(url, proxies={'http': proxy, 'https': proxy},
                                timeout=10)
        response.raise_for_status()  # Raise on a non-2xx HTTP status

        # Parse the HTML content with BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')

        # Collect every link on the page
        links = [a['href'] for a in soup.find_all('a', href=True)]
        print(f'Taradığım URL: {url}')
        print('Bağlantılar:', links)
    except requests.RequestException as e:
        print(f'URL taranırken hata oluştu: {url}', e)


for i in range(10):  # Example: only 10 iterations
    proxy = next(proxy_pool)
    crawl_with_proxy(start_url, proxy)
import got from 'got';
import {HttpProxyAgent, HttpsProxyAgent} from 'hpagent';

// NOTE(review): hardcoded proxy credentials — move to an environment variable
// and rotate this exposed Apify proxy password.
const proxyUrl =
  'http://country-TR,groups-RESIDENTIAL:apify_proxy_S4SvG4Mq3M9boAVDli5S0kUJjt4NdV2WmFMC@proxy.apify.com:8000';

for (let i = 0; i < 30; i++) {
  // BUG FIX: the proxy URL must be a quoted string literal — the original
  // bare `http://...` token was a JavaScript syntax error.
  const response = await got('https://www.youtube.com/', {
    agent: {
      http: new HttpProxyAgent({ proxy: proxyUrl }),
      // BUG FIX: the target is HTTPS, and got consults `agent.https` for
      // https URLs — the original set only `agent.http`, so the proxy was
      // never actually applied to these requests.
      https: new HttpsProxyAgent({ proxy: proxyUrl }),
    },
  });
}