// Package crawler discovers log files to tail and coordinates the
// prospectors and harvesters that read them.
package crawler
import (
	"fmt"
	"os"
	"github.com/elastic/beats/filebeat/config"
	"github.com/elastic/beats/filebeat/input"
	"github.com/elastic/beats/libbeat/logp"
)
/*
 The hierarchy of the crawler objects is as follows:

 Crawler: Filebeat has one crawler. The crawler is the single point of control
 	and stores the state. The state is written through the registrar.
 Prospector: For every FileConfig the crawler starts a prospector.
 Harvester: For every file found inside the FileConfig, the prospector starts a harvester.
 		The harvesters send their events to the spooler.
 		The spooler sends the events to the publisher.
 		The publisher writes the state down with the registrar.
*/
// Crawler is Filebeat's single point of control for file discovery.
// It starts one prospector per prospector configuration and persists
// harvester state through the Registrar.
type Crawler struct {
	// Registrar object to persist the state
	Registrar *Registrar
	// running is set by Start and read inside its event loop to allow
	// early termination.
	// NOTE(review): accessed without synchronization — assumes Start/Stop
	// are coordinated by the caller; confirm against the callers.
	running   bool
}
// Start launches one prospector (and, transitively, its harvesters) per
// prospector configuration, then blocks consuming state events from the
// registrar's Persist channel until every prospector has finished its
// initial scan.
//
// A nil event Source is the sentinel a prospector sends when its initial
// scan is complete; any other event updates the registrar's in-memory state.
// Initialization errors are fatal: they are logged and the process exits.
func (crawler *Crawler) Start(files []config.ProspectorConfig, eventChan chan *input.FileEvent) {

	pendingProspectorCnt := 0
	crawler.running = true

	// Prospect the globs/paths given on the command line and launch harvesters
	for _, fileconfig := range files {

		logp.Debug("prospector", "File Configs: %v", fileconfig.Paths)

		prospector := &Prospector{
			ProspectorConfig: fileconfig,
			registrar:        crawler.Registrar,
		}

		if err := prospector.Init(); err != nil {
			logp.Critical("Error in initing prospector: %s", err)
			fmt.Printf("Error in initing prospector: %s", err)
			os.Exit(1)
		}

		go prospector.Run(eventChan)
		pendingProspectorCnt++
	}

	// Bug fix: with zero configured prospectors the loop below would block
	// forever on Persist — the counter starts at 0, so the first nil-source
	// event (if any) would take it to -1 and the == 0 check could never fire.
	if pendingProspectorCnt == 0 {
		logp.Info("No prospectors defined, nothing to initialise")
		return
	}

	// Now determine which states we need to persist by pulling the events from the prospectors
	// When we hit a nil source a prospector had finished so we decrease the expected events
	logp.Debug("prospector", "Waiting for %d prospectors to initialise", pendingProspectorCnt)

	for event := range crawler.Registrar.Persist {
		if event.Source == nil {
			// Sentinel: one prospector completed its initial scan.
			pendingProspectorCnt--
			if pendingProspectorCnt == 0 {
				logp.Debug("prospector", "No pending prospectors. Finishing setup")
				break
			}
			continue
		}
		crawler.Registrar.State[*event.Source] = event
		logp.Debug("prospector", "Registrar will re-save state for %s", *event.Source)

		// Allow Stop to abort the setup phase early.
		if !crawler.running {
			break
		}
	}

	logp.Info("All prospectors initialised with %d states to persist", len(crawler.Registrar.State))
}
// Stop requests termination of the crawler: it clears the running flag so
// that Start's event loop exits on the next persisted event. Previously this
// was a pure no-op even though Start already checked the flag.
//
// TODO: Properly stop prospectors and harvesters.
func (crawler *Crawler) Stop() {
	crawler.running = false
}