using System.Text.RegularExpressions;

namespace Backend.Helper;

public static partial class HttpClientHelper
{
    // Reddit, for example, will block the GET request if you don't have a user agent.
    private const string UserAgentHeader =
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36";

    public static async Task<bool> HasRobotsTxt(string url, int port)
    {
        using HttpClient client = new();

        // Port 80 is treated as plain HTTP; any other port is assumed to be HTTPS.
        if (port == 80)
        {
            try
            {
                client.BaseAddress = new($"http://{url}");
            }
            catch
            {
                // Ignore malformed URLs; the request below will simply fail.
            }
        }
        else
        {
            try
            {
                client.BaseAddress = new($"https://{url}");
            }
            catch
            {
                // Ignore malformed URLs; the request below will simply fail.
            }
        }

        client.DefaultRequestHeaders.Accept.Clear();
        client.DefaultRequestHeaders.UserAgent.ParseAdd(UserAgentHeader);
        client.Timeout = TimeSpan.FromSeconds(30);

        HttpResponseMessage? response = null;
        try
        {
            // A HEAD request is enough to check for existence without downloading the file.
            response = await client.SendAsync(new(HttpMethod.Head, "/robots.txt"));
        }
        catch
        {
            // Network errors and timeouts are treated as "no robots.txt".
        }

        return response is not null && response.IsSuccessStatusCode;
    }
}
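
// Usage sketch (illustrative only, not part of the original helper): the host name and port
// below are assumptions. Port 80 probes over plain HTTP; any other port is probed over HTTPS.
//
//     bool found = await HttpClientHelper.HasRobotsTxt("example.com", 443);
//     Console.WriteLine(found ? "robots.txt is present" : "robots.txt not found or unreachable");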