package main
import (
	"fmt"
	"log"
	"net/http"
	"os"
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
	"golang.org/x/net/html"
	"golang.org/x/net/html/atom"
)
// initLogger builds a zap logger that writes JSON-encoded entries to stdout at Info level and above.
func initLogger() (*zap.Logger, error) {
encoderConfig := zapcore.EncoderConfig{
TimeKey: "ts",
LevelKey: "level",
NameKey: "logger",
CallerKey: "caller",
MessageKey: "msg",
StacktraceKey: "stacktrace",
LineEnding: zapcore.DefaultLineEnding,
EncodeLevel: zapcore.LowercaseLevelEncoder,
EncodeTime: zapcore.EpochMillisTimeEncoder,
EncodeDuration: zapcore.SecondsDurationEncoder,
}
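	// Route JSON-encoded entries to stdout; anything below Info level is discarded.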
core := zapcore.NewCore(zapcore.NewJSONEncoder(encoderConfig), os.Stdout, zapcore.InfoLevel)
return zap.New(core), nil
}
// extractElements walks the parsed HTML tree and returns every element node whose atom matches targetAtom.
func extractElements(doc *html.Node, targetAtom atom.Atom) []*html.Node {
var elements []*html.Node
var f func(*html.Node)
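	// Depth-first traversal: check the current node, then recurse into each of its children.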
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.DataAtom == targetAtom {
elements = append(elements, n)
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
}
}
f(doc)
return elements
}
// crawlPage fetches the page at url, parses it, and returns the concatenated text of its h1 elements.
func crawlPage(logger *zap.Logger, url string) (string, error) {
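	// The 10-second timeout covers the entire request, including reading the response body.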
client := &http.Client{Timeout: 10 * time.Second}
resp, err := client.Get(url)
if err != nil {
logger.Error("Error fetching URL", zap.Error(err), zap.String("url", url))
return "", err
}
defer resp.Body.Close()
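	// Always close the response body so the underlying connection can be reused.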
if resp.StatusCode != http.StatusOK {
logger.Error("Received non-200 HTTP status code", zap.Int("status_code", resp.StatusCode), zap.String("url", url))
return "", fmt.Errorf("non-200 status code: %d", resp.StatusCode)
}
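	// Parse the full response body into an HTML node tree rooted at the document node.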
doc, err := html.Parse(resp.Body)
if err != nil {
logger.Error("Error parsing HTML", zap.Error(err), zap.String("url", url))
return "", err
}
	// For this example, extract every h1 element on the page.
h1Elements := extractElements(doc, atom.H1)
var content string
	for _, elem := range h1Elements {
		// Concatenate the text of the element's direct children, keeping only text nodes.
		for n := elem.FirstChild; n != nil; n = n.NextSibling {
			if n.Type == html.TextNode {
				content += n.Data
			}
		}
	}
return content, nil
}
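// main sets up the logger, crawls a single example page, and logs the extracted h1 text.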
func main() {
logger, err := initLogger()
if err != nil {
log.Fatalf("Error initializing logger: %v", err)
}
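	// Flush any buffered log entries before exiting; the Sync error is intentionally ignored.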
defer logger.Sync()
content, err := crawlPage(logger, "https://example.com")
if err != nil {
logger.Error("Crawling failed", zap.E