import Foundation

/// Errors that can occur when fetching an Org Social feed.
public enum FeedFetcherError: Error, Sendable, Equatable {
    /// The URL provided is not a valid HTTP/HTTPS URL.
    case invalidURL
    /// The server returned a non-2xx HTTP status code.
    case httpError(statusCode: Int)
    /// The response body could not be decoded as UTF-8 text.
    case decodingError
    /// A network-level error occurred (timeout, no connection, etc.).
    case networkError(underlying: String)
}

// MARK: - LocalizedError

extension FeedFetcherError: LocalizedError {
    /// Human-readable description for each failure case.
    public var errorDescription: String? {
        switch self {
        case .invalidURL:
            return "The URL is not a valid HTTP or HTTPS address."
        case .httpError(let code):
            return "The server returned HTTP \(code)."
        case .decodingError:
            return "The feed content could not be decoded as UTF-8 text."
        case .networkError(let message):
            return "Network error: \(message)"
        }
    }
}

/// Fetches the raw content of an Org Social feed from a remote URL.
public struct FeedFetcher: Sendable {
    private let session: URLSession

    public init(session: URLSession = .shared) {
        self.session = session
    }

    /// Downloads the raw text content of a `social.org` file.
    ///
    /// - Parameters:
    ///   - url: The public URL of the `social.org` file.
    ///   - bypassCache: When true, sends `Cache-Control: no-cache` and appends a cache-busting
    ///     query parameter to defeat URLSession and CDN caches. Use right after uploads when
    ///     freshness matters more than efficiency.
    /// - Returns: The raw UTF-8 string content of the feed.
    /// - Throws: `FeedFetcherError` if the request fails or the response is invalid.
    public func fetch(from url: URL, bypassCache: Bool = false) async throws -> String {
        // URI schemes are case-insensitive (RFC 3986 §3.1) but `URL.scheme`
        // preserves the original case, so normalize before comparing —
        // otherwise a valid "HTTPS://..." URL would be rejected.
        guard let scheme = url.scheme?.lowercased(),
              scheme == "http" || scheme == "https" else {
            throw FeedFetcherError.invalidURL
        }

        let requestURL: URL
        if bypassCache {
            // Append a millisecond-timestamp query item so every request has a
            // unique URL, defeating intermediary caches keyed on the URL.
            var comps = URLComponents(url: url, resolvingAgainstBaseURL: false)
            var items = comps?.queryItems ?? []
            items.append(URLQueryItem(name: "_t", value: "\(Int(Date().timeIntervalSince1970 * 1000))"))
            comps?.queryItems = items
            requestURL = comps?.url ?? url
        } else {
            requestURL = url
        }

        var request = URLRequest(url: requestURL)
        // Cap individual feed fetches so a single unreachable host doesn't stall
        // aggregate loads like thread resolution (which waits for N parallel feeds).
        request.timeoutInterval = 10
        if bypassCache {
            request.cachePolicy = .reloadIgnoringLocalAndRemoteCacheData
            request.setValue("no-cache", forHTTPHeaderField: "Cache-Control")
            request.setValue("no-cache", forHTTPHeaderField: "Pragma")
        }

        let data: Data
        let response: URLResponse
        do {
            (data, response) = try await session.data(for: request)
        } catch {
            // Collapse URLSession errors to a string so the error stays Equatable/Sendable.
            throw FeedFetcherError.networkError(underlying: error.localizedDescription)
        }

        // Non-HTTP responses (no HTTPURLResponse) pass through without a status check.
        if let http = response as? HTTPURLResponse, !(200..<300).contains(http.statusCode) {
            throw FeedFetcherError.httpError(statusCode: http.statusCode)
        }

        guard let content = String(data: data, encoding: .utf8) else {
            throw FeedFetcherError.decodingError
        }
        return content
    }
}