
Go Example

This example demonstrates how to call the WebClassifAI API from Go using only the standard net/http package.

Basic Example

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
)

const apiEndpoint = "https://api.classifai.com/v1"

type Prediction struct {
    Tier1 *Category `json:"tier1,omitempty"`
    Tier2 *Category `json:"tier2,omitempty"`
}

type Category struct {
    Name       string  `json:"name"`
    Code       string  `json:"code"`
    Confidence float64 `json:"confidence"`
}

type Result struct {
    URL                 string       `json:"url"`
    ScrapedSuccessfully bool         `json:"scraped_successfully"`
    Predictions         []Prediction `json:"predictions"`
}

type Response struct {
    Results []Result `json:"results"`
}

type Request struct {
    URLs     []string `json:"urls"`
    APIKey   string   `json:"api_key"`
    Taxonomy string   `json:"taxonomy"`
}

func classifyURLs(urls []string, apiKey string, taxonomy string) (*Response, error) {
    reqBody := Request{
        URLs:     urls,
        APIKey:   apiKey,
        Taxonomy: taxonomy,
    }

    jsonData, err := json.Marshal(reqBody)
    if err != nil {
        return nil, fmt.Errorf("error marshaling request: %v", err)
    }

    req, err := http.NewRequest("POST", apiEndpoint, bytes.NewBuffer(jsonData))
    if err != nil {
        return nil, fmt.Errorf("error creating request: %v", err)
    }
    req.Header.Set("Content-Type", "application/json")

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, fmt.Errorf("error making request: %v", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        return nil, fmt.Errorf("API error: %s - %s", resp.Status, string(body))
    }

    var response Response
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return nil, fmt.Errorf("error decoding response: %v", err)
    }

    return &response, nil
}

func main() {
    apiKey := "your_api_key_here"
    urls := []string{
        "https://www.example.com/",
        "https://www.tech-blog.com/",
    }

    response, err := classifyURLs(urls, apiKey, "iab-1.0")
    if err != nil {
        fmt.Printf("Error: %v\n", err)
        return
    }

    // Process results
    for _, result := range response.Results {
        fmt.Printf("\nURL: %s\n", result.URL)
        fmt.Printf("Scraped successfully: %v\n", result.ScrapedSuccessfully)

        for _, prediction := range result.Predictions {
            fmt.Println("\nPrediction:")
            if prediction.Tier1 != nil {
                fmt.Printf("Tier 1: %s (%s) - Confidence: %.2f\n",
                    prediction.Tier1.Name, prediction.Tier1.Code, prediction.Tier1.Confidence)
            }
            if prediction.Tier2 != nil {
                fmt.Printf("Tier 2: %s (%s) - Confidence: %.2f\n",
                    prediction.Tier2.Name, prediction.Tier2.Code, prediction.Tier2.Confidence)
            }
        }
    }
}

Error Handling

For production use, add a client timeout and basic retry logic:

func classifyURLs(urls []string, apiKey string, taxonomy string) (*Response, error) {
    // Add a timeout to the client (requires importing "time")
    client := &http.Client{
        Timeout: 30 * time.Second,
    }

    // ... existing code ...

    // Add retry logic with a simple linear backoff. The request is rebuilt on
    // every attempt because its body is consumed by the previous attempt.
    var response *Response
    var err error
    for i := 0; i < 3; i++ {
        req, reqErr := http.NewRequest("POST", apiEndpoint, bytes.NewBuffer(jsonData))
        if reqErr != nil {
            return nil, fmt.Errorf("error creating request: %v", reqErr)
        }
        req.Header.Set("Content-Type", "application/json")

        response, err = doRequest(client, req)
        if err == nil {
            break
        }
        time.Sleep(time.Second * time.Duration(i+1))
    }
    return response, err
}

func doRequest(client *http.Client, req *http.Request) (*Response, error) {
    resp, err := client.Do(req)
    if err != nil {
        return nil, fmt.Errorf("request failed: %v", err)
    }
    defer resp.Body.Close()

    // ... rest of the request handling ...
}
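If you prefer a per-request deadline over (or in addition to) the client-wide timeout, Go's standard context package works with net/http. The sketch below is one possible variant rather than part of the API examples above; classifyWithDeadline is a hypothetical helper name, the 30-second value is arbitrary, and it reuses apiEndpoint, Response, jsonData, and doRequest from the snippets above.

// Sketch: per-request deadline via context.
// Requires the "bytes", "context", "fmt", "net/http", and "time" imports.
func classifyWithDeadline(client *http.Client, jsonData []byte) (*Response, error) {
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel() // release the timer once the request finishes

    req, err := http.NewRequestWithContext(ctx, http.MethodPost, apiEndpoint, bytes.NewReader(jsonData))
    if err != nil {
        return nil, fmt.Errorf("error creating request: %v", err)
    }
    req.Header.Set("Content-Type", "application/json")

    // doRequest is the helper defined above; the request is canceled
    // automatically if it runs past the deadline.
    return doRequest(client, req)
}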

Batch Processing

Important: We strongly recommend batching URLs in a single request (up to the 500 URL limit) for optimal performance. Only use async execution when you need to process more than 500 URLs at a time or your architecture specifically requires it.

To take advantage of batching, send multiple URLs in a single request:

// Process up to 500 URLs in a single request
urls := make([]string, 500)
for i := range urls {
    urls[i] = "https://example.com"
}

response, err := classifyURLs(urls, apiKey, "iab-1.0")
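If you have more URLs than the 500-per-request limit and are not using async execution, one option is to split the list into chunks of at most 500 and call classifyURLs once per chunk. The helper below is a minimal sketch under that assumption; classifyInBatches and maxBatchSize are illustrative names, and the requests are issued sequentially.

const maxBatchSize = 500 // documented per-request URL limit

// classifyInBatches splits urls into chunks of at most maxBatchSize and
// classifies each chunk with the classifyURLs function from the basic example.
func classifyInBatches(urls []string, apiKey, taxonomy string) ([]Result, error) {
    var results []Result
    for start := 0; start < len(urls); start += maxBatchSize {
        end := start + maxBatchSize
        if end > len(urls) {
            end = len(urls)
        }

        resp, err := classifyURLs(urls[start:end], apiKey, taxonomy)
        if err != nil {
            return results, fmt.Errorf("batch starting at %d failed: %v", start, err)
        }
        results = append(results, resp.Results...)
    }
    return results, nil
}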

