package elasticsearch

import (
	"fmt"
	"net/url"
	"strings"
)

// addToURL joins the base URL and path, appending the pipeline and params
// as a query string when either is set.
func addToURL(url, path, pipeline string, params map[string]string) string {
	if strings.HasSuffix(url, "/") && strings.HasPrefix(path, "/") {
		url = strings.TrimSuffix(url, "/")
	}

	if len(params) == 0 && pipeline == "" {
		return url + path
	}

	return strings.Join([]string{
		url, path, "?", urlEncode(pipeline, params),
	}, "")
}
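
// Editor's sketch, not part of the original source: a usage example for
// addToURL. The host "http://localhost:9200", the path and the pipeline name
// are illustrative placeholders only.
func exampleAddToURL() {
	// Duplicate slashes at the join point are collapsed.
	fmt.Println(addToURL("http://localhost:9200/", "/filebeat/_doc", "", nil))
	// http://localhost:9200/filebeat/_doc

	// With a pipeline and extra parameters, a query string is appended.
	fmt.Println(addToURL("http://localhost:9200", "/filebeat/_doc", "geoip",
		map[string]string{"refresh": "true"}))
	// http://localhost:9200/filebeat/_doc?pipeline=geoip&refresh=true
}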
// urlEncode encodes the pipeline name and params as a URL query string.
func urlEncode(pipeline string, params map[string]string) string {
	values := url.Values{}

	for key, val := range params {
		values.Add(key, val)
	}

	if pipeline != "" {
		values.Add("pipeline", pipeline)
	}

	return values.Encode()
}
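
// Editor's sketch, not part of the original source: urlEncode delegates to
// url.Values.Encode, so keys come back sorted and values percent-escaped.
// The parameter values below are illustrative placeholders.
func exampleURLEncode() {
	fmt.Println(urlEncode("geoip", map[string]string{"routing": "user 1"}))
	// pipeline=geoip&routing=user+1
}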
// makePath creates the path out of index, docType and id that is used for
// querying Elasticsearch.
func makePath(index string, docType string, id string) (string, error) {
	var path string
	if len(docType) > 0 {
		if len(id) > 0 {
			path = fmt.Sprintf("/%s/%s/%s", index, docType, id)
		} else {
			path = fmt.Sprintf("/%s/%s", index, docType)
		}
	} else {
		if len(id) > 0 {
			if len(index) > 0 {
				path = fmt.Sprintf("/%s/%s", index, id)
			} else {
				path = fmt.Sprintf("/%s", id)
			}
		} else {
			path = fmt.Sprintf("/%s", index)
		}
	}
	return path, nil
}
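
// Editor's sketch, not part of the original source: how makePath combines the
// three components. The index, docType and id values are illustrative
// placeholders.
func exampleMakePath() {
	p, _ := makePath("filebeat", "_doc", "1")
	fmt.Println(p) // /filebeat/_doc/1

	p, _ = makePath("filebeat", "", "")
	fmt.Println(p) // /filebeat
}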
// TODO: make this reusable. Same definition in elasticsearch monitoring module
func parseProxyURL(raw string) (*url.URL, error) {
	if raw == "" {
		return nil, nil
	}

	parsedURL, err := url.Parse(raw)
	if err == nil && strings.HasPrefix(parsedURL.Scheme, "http") {
		return parsedURL, nil
	}

	// Proxy was bogus. Try prepending "http://" to it and
	// see if that parses correctly.
	return url.Parse("http://" + raw)
}
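
// Editor's sketch, not part of the original source: parseProxyURL accepts both
// full URLs and bare host:port strings. The proxy addresses below are
// illustrative placeholders.
func exampleParseProxyURL() {
	u, _ := parseProxyURL("http://proxy.example.com:3128")
	fmt.Println(u) // http://proxy.example.com:3128

	// A bare host:port is retried with an "http://" prefix.
	u, _ = parseProxyURL("proxy.example.com:3128")
	fmt.Println(u) // http://proxy.example.com:3128
}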