Integration
Code
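All of the examples below send requests through the proxy gateway with basic authentication. Replace username, password, and API_URL with your own credentials and target URL. From the command line, a single request can be sent with cURL: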
curl --proxy host:port --proxy-user username:password API_URL
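In Python, the requests library accepts a proxies dictionary, which applies the same proxy to both HTTP and HTTPS traffic: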
import requests

# Proxy credentials and gateway endpoint
username = 'username'
password = 'password'
host = 'gw-us.scrapeless.io'
port = '8789'

# Apply the same proxy to both HTTP and HTTPS traffic
proxy = f'http://{username}:{password}@{host}:{port}'
proxy_dict = {
    "http": proxy,
    "https": proxy
}

response = requests.get("API_URL", proxies=proxy_dict)
print(response.text)
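In Node.js, axios takes the proxy host, port, and credentials through its proxy option: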
const axios = require("axios");

const url = "API_URL";

axios.get(url, {
  proxy: {
    protocol: 'http',
    host: 'gw-us.scrapeless.io',
    port: 8789,
    auth: {
      username: 'username',
      password: 'password',
    },
  },
})
  .then((res) => {
    console.log(res.data);
  })
  .catch((err) => {
    console.log('[err]:', err);
  });
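In Go, the proxy is set on the http.Transport used by the client: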
package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

func main() {
    // Proxy credentials and gateway endpoint
    username := "username"
    password := "password"
    host := "gw-us.scrapeless.io"
    port := 8789

    proxyURL := fmt.Sprintf("http://%s:%s@%s:%v", username, password, host, port)
    proxy, err := url.Parse(proxyURL)
    if err != nil {
        panic(err)
    }

    // Route every request from this client through the proxy
    client := &http.Client{
        Transport: &http.Transport{
            Proxy: http.ProxyURL(proxy),
        },
    }

    req, err := http.NewRequest("GET", "API_URL", nil)
    if err != nil {
        panic(err)
    }

    response, err := client.Do(req)
    if err != nil {
        panic(err)
    }
    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(body))
}
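In PHP, the proxy address and credentials are passed as cURL options: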
<?php
// Proxy credentials and gateway endpoint
$username = 'USERNAME';
$password = 'PASSWORD';
$proxy = 'gw-us.scrapeless.io:8789';

$query = curl_init('API_URL');
curl_setopt($query, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($query, CURLOPT_PROXY, "http://$proxy");
curl_setopt($query, CURLOPT_PROXYUSERPWD, "$username:$password");

$output = curl_exec($query);
curl_close($query);

if ($output) {
    echo $output;
}
?>
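In Java, the Apache HttpClient fluent API authenticates against the proxy host and sends the request through it: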
package example;

import org.apache.http.HttpHost;
import org.apache.http.client.fluent.*;

public class Example {
    public static void main(String[] args) throws Exception {
        HttpHost entry = new HttpHost("gw-us.scrapeless.io", 8789);
        String query = Executor.newInstance()
                .auth(entry, "username", "password")
                .execute(Request.Get("API_URL").viaProxy(entry))
                .returnContent().asString();
        System.out.println(query);
    }
}
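In Ruby, Net::HTTP::Proxy builds an HTTP class that performs all requests via the proxy: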
require "uri"
require 'net/http'
proxy_host = 'gw-us.scrapeless.io'
proxy_port = 8789
proxy_user = 'username'
proxy_pass = 'password'
uri = URI.parse('API_URL')
proxy = Net::HTTP::Proxy(proxy_host, proxy_port, proxy_user, proxy_pass)
req = Net::HTTP::Get.new(uri.path)
result = proxy.start(uri.host, uri.port) do |http|
http.request(req)
end
puts result.body
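In Rust, reqwest attaches the proxy and its credentials to the client at build time: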
#[tokio::main]
async fn main() {
    // Configure the proxy with basic authentication
    let http_proxy = reqwest::Proxy::http("http://gw-us.scrapeless.io:8789")
        .unwrap()
        .basic_auth("username", "password");

    let client = reqwest::ClientBuilder::new().proxy(http_proxy).build().unwrap();

    let resp = client.get("API_URL").send().await.unwrap();
    println!("{:?}", resp.text().await.unwrap());
}
Browser
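With the Scrapeless Node.js SDK, a browser session is created with the desired proxy country in the session options and then controlled through the usual newPage and goto calls: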
import { Scrapeless } from '@scrapeless-ai/sdk';

const client = new Scrapeless({
  apiKey: 'YOUR_API_KEY'
});

(async () => {
  const browser = await client.browser.connect({
    session_name: 'session_name',
    session_ttl: 180,
    proxy_country: 'US',
    session_recording: true,
    defaultViewport: null
  });

  const page = await browser.newPage();
  await page.goto('API_URL');

  await browser.close();
})();
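With SeleniumBase, the proxy string, including credentials, is passed directly when creating the Driver: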
from seleniumbase import Driver
proxy = 'username:password@gw-us.scrapeless.com:8789'
driver = Driver(browser="chrome", headless=False, proxy=proxy)
driver.get("API_URL")
driver.quit()
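With Puppeteer, the proxy server is supplied as a launch argument and the credentials are provided with page.authenticate before navigating: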
const puppeteer = require('puppeteer');

(async () => {
  // Proxy gateway and credentials
  const proxyUrl = 'http://gw-us.scrapeless.com:8789';
  const username = 'username';
  const password = 'password';

  const browser = await puppeteer.launch({
    args: [`--proxy-server=${proxyUrl}`],
    headless: false
  });

  const page = await browser.newPage();
  // Authenticate against the proxy before navigating
  await page.authenticate({ username, password });
  await page.goto('API_URL');

  await browser.close();
})();