plugin/etcdv3: Add etcd v3 plugin (#1702)
* Update dependencies and add etcdv3 client
* Update etcd plugin to support etcd v3 clients

Fixes #341
This commit is contained in:
parent f3afd70021
commit 6fe27d99be

10327 changed files with 4196998 additions and 82 deletions
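For orientation, the sketch below shows roughly what connecting with the upstream etcd v3 client looks like, using the clientv3 package this commit vendors. The endpoints, timeout, key prefix, and the pre-module import path are illustrative assumptions; this is not the plugin's actual code.

```go
// Minimal sketch (not the plugin's code): connect to etcd with the v3 client
// and read keys under an illustrative prefix.
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/coreos/etcd/clientv3" // pre-module import path, as vendored at the time
)

func main() {
	cli, err := clientv3.New(clientv3.Config{
		Endpoints:   []string{"http://localhost:2379"}, // illustrative endpoint
		DialTimeout: 5 * time.Second,
	})
	if err != nil {
		panic(err)
	}
	defer cli.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	// Prefix query; the key layout here is only an example.
	resp, err := cli.Get(ctx, "/skydns/local/example", clientv3.WithPrefix())
	if err != nil {
		panic(err)
	}
	for _, kv := range resp.Kvs {
		fmt.Printf("%s: %s\n", kv.Key, kv.Value)
	}
}
```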
3380  vendor/github.com/prometheus/client_model/cpp/metrics.pb.cc  (generated, vendored, new file)
File diff suppressed because it is too large.
2072  vendor/github.com/prometheus/client_model/cpp/metrics.pb.h  (generated, vendored, new file)
File diff suppressed because it is too large.
12  vendor/github.com/prometheus/client_model/python/prometheus/__init__.py  (generated, vendored, new file)
@@ -0,0 +1,12 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
12  vendor/github.com/prometheus/client_model/python/prometheus/client/__init__.py  (generated, vendored, new file)
@@ -0,0 +1,12 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
14  vendor/github.com/prometheus/client_model/python/prometheus/client/model/__init__.py  (generated, vendored, new file)
@@ -0,0 +1,14 @@
# Copyright 2013 Prometheus Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__all__ = ['metrics_pb2']
575  vendor/github.com/prometheus/client_model/python/prometheus/client/model/metrics_pb2.py  (generated, vendored, new file)
@@ -0,0 +1,575 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: metrics.proto
|
||||
|
||||
import sys
|
||||
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
||||
from google.protobuf.internal import enum_type_wrapper
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='metrics.proto',
|
||||
package='io.prometheus.client',
|
||||
serialized_pb=_b('\n\rmetrics.proto\x12\x14io.prometheus.client\"(\n\tLabelPair\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\x16\n\x05Gauge\x12\r\n\x05value\x18\x01 \x01(\x01\"\x18\n\x07\x43ounter\x12\r\n\x05value\x18\x01 \x01(\x01\"+\n\x08Quantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01\"e\n\x07Summary\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12\x30\n\x08quantile\x18\x03 \x03(\x0b\x32\x1e.io.prometheus.client.Quantile\"\x18\n\x07Untyped\x12\r\n\x05value\x18\x01 \x01(\x01\"c\n\tHistogram\x12\x14\n\x0csample_count\x18\x01 \x01(\x04\x12\x12\n\nsample_sum\x18\x02 \x01(\x01\x12,\n\x06\x62ucket\x18\x03 \x03(\x0b\x32\x1c.io.prometheus.client.Bucket\"7\n\x06\x42ucket\x12\x18\n\x10\x63umulative_count\x18\x01 \x01(\x04\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01\"\xbe\x02\n\x06Metric\x12.\n\x05label\x18\x01 \x03(\x0b\x32\x1f.io.prometheus.client.LabelPair\x12*\n\x05gauge\x18\x02 \x01(\x0b\x32\x1b.io.prometheus.client.Gauge\x12.\n\x07\x63ounter\x18\x03 \x01(\x0b\x32\x1d.io.prometheus.client.Counter\x12.\n\x07summary\x18\x04 \x01(\x0b\x32\x1d.io.prometheus.client.Summary\x12.\n\x07untyped\x18\x05 \x01(\x0b\x32\x1d.io.prometheus.client.Untyped\x12\x32\n\thistogram\x18\x07 \x01(\x0b\x32\x1f.io.prometheus.client.Histogram\x12\x14\n\x0ctimestamp_ms\x18\x06 \x01(\x03\"\x88\x01\n\x0cMetricFamily\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04help\x18\x02 \x01(\t\x12.\n\x04type\x18\x03 \x01(\x0e\x32 .io.prometheus.client.MetricType\x12,\n\x06metric\x18\x04 \x03(\x0b\x32\x1c.io.prometheus.client.Metric*M\n\nMetricType\x12\x0b\n\x07\x43OUNTER\x10\x00\x12\t\n\x05GAUGE\x10\x01\x12\x0b\n\x07SUMMARY\x10\x02\x12\x0b\n\x07UNTYPED\x10\x03\x12\r\n\tHISTOGRAM\x10\x04\x42\x16\n\x14io.prometheus.client')
|
||||
)
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
_METRICTYPE = _descriptor.EnumDescriptor(
|
||||
name='MetricType',
|
||||
full_name='io.prometheus.client.MetricType',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='COUNTER', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='GAUGE', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='SUMMARY', index=2, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='UNTYPED', index=3, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='HISTOGRAM', index=4, number=4,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=923,
|
||||
serialized_end=1000,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_METRICTYPE)
|
||||
|
||||
MetricType = enum_type_wrapper.EnumTypeWrapper(_METRICTYPE)
|
||||
COUNTER = 0
|
||||
GAUGE = 1
|
||||
SUMMARY = 2
|
||||
UNTYPED = 3
|
||||
HISTOGRAM = 4
|
||||
|
||||
|
||||
|
||||
_LABELPAIR = _descriptor.Descriptor(
|
||||
name='LabelPair',
|
||||
full_name='io.prometheus.client.LabelPair',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='io.prometheus.client.LabelPair.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='io.prometheus.client.LabelPair.value', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=39,
|
||||
serialized_end=79,
|
||||
)
|
||||
|
||||
|
||||
_GAUGE = _descriptor.Descriptor(
|
||||
name='Gauge',
|
||||
full_name='io.prometheus.client.Gauge',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='io.prometheus.client.Gauge.value', index=0,
|
||||
number=1, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=81,
|
||||
serialized_end=103,
|
||||
)
|
||||
|
||||
|
||||
_COUNTER = _descriptor.Descriptor(
|
||||
name='Counter',
|
||||
full_name='io.prometheus.client.Counter',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='io.prometheus.client.Counter.value', index=0,
|
||||
number=1, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=105,
|
||||
serialized_end=129,
|
||||
)
|
||||
|
||||
|
||||
_QUANTILE = _descriptor.Descriptor(
|
||||
name='Quantile',
|
||||
full_name='io.prometheus.client.Quantile',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='quantile', full_name='io.prometheus.client.Quantile.quantile', index=0,
|
||||
number=1, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='io.prometheus.client.Quantile.value', index=1,
|
||||
number=2, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=131,
|
||||
serialized_end=174,
|
||||
)
|
||||
|
||||
|
||||
_SUMMARY = _descriptor.Descriptor(
|
||||
name='Summary',
|
||||
full_name='io.prometheus.client.Summary',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='sample_count', full_name='io.prometheus.client.Summary.sample_count', index=0,
|
||||
number=1, type=4, cpp_type=4, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='sample_sum', full_name='io.prometheus.client.Summary.sample_sum', index=1,
|
||||
number=2, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='quantile', full_name='io.prometheus.client.Summary.quantile', index=2,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=176,
|
||||
serialized_end=277,
|
||||
)
|
||||
|
||||
|
||||
_UNTYPED = _descriptor.Descriptor(
|
||||
name='Untyped',
|
||||
full_name='io.prometheus.client.Untyped',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='io.prometheus.client.Untyped.value', index=0,
|
||||
number=1, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=279,
|
||||
serialized_end=303,
|
||||
)
|
||||
|
||||
|
||||
_HISTOGRAM = _descriptor.Descriptor(
|
||||
name='Histogram',
|
||||
full_name='io.prometheus.client.Histogram',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='sample_count', full_name='io.prometheus.client.Histogram.sample_count', index=0,
|
||||
number=1, type=4, cpp_type=4, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='sample_sum', full_name='io.prometheus.client.Histogram.sample_sum', index=1,
|
||||
number=2, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='bucket', full_name='io.prometheus.client.Histogram.bucket', index=2,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=305,
|
||||
serialized_end=404,
|
||||
)
|
||||
|
||||
|
||||
_BUCKET = _descriptor.Descriptor(
|
||||
name='Bucket',
|
||||
full_name='io.prometheus.client.Bucket',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='cumulative_count', full_name='io.prometheus.client.Bucket.cumulative_count', index=0,
|
||||
number=1, type=4, cpp_type=4, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='upper_bound', full_name='io.prometheus.client.Bucket.upper_bound', index=1,
|
||||
number=2, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=406,
|
||||
serialized_end=461,
|
||||
)
|
||||
|
||||
|
||||
_METRIC = _descriptor.Descriptor(
|
||||
name='Metric',
|
||||
full_name='io.prometheus.client.Metric',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='label', full_name='io.prometheus.client.Metric.label', index=0,
|
||||
number=1, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='gauge', full_name='io.prometheus.client.Metric.gauge', index=1,
|
||||
number=2, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='counter', full_name='io.prometheus.client.Metric.counter', index=2,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='summary', full_name='io.prometheus.client.Metric.summary', index=3,
|
||||
number=4, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='untyped', full_name='io.prometheus.client.Metric.untyped', index=4,
|
||||
number=5, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='histogram', full_name='io.prometheus.client.Metric.histogram', index=5,
|
||||
number=7, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='timestamp_ms', full_name='io.prometheus.client.Metric.timestamp_ms', index=6,
|
||||
number=6, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=464,
|
||||
serialized_end=782,
|
||||
)
|
||||
|
||||
|
||||
_METRICFAMILY = _descriptor.Descriptor(
|
||||
name='MetricFamily',
|
||||
full_name='io.prometheus.client.MetricFamily',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='io.prometheus.client.MetricFamily.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='help', full_name='io.prometheus.client.MetricFamily.help', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='type', full_name='io.prometheus.client.MetricFamily.type', index=2,
|
||||
number=3, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='metric', full_name='io.prometheus.client.MetricFamily.metric', index=3,
|
||||
number=4, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=785,
|
||||
serialized_end=921,
|
||||
)
|
||||
|
||||
_SUMMARY.fields_by_name['quantile'].message_type = _QUANTILE
|
||||
_HISTOGRAM.fields_by_name['bucket'].message_type = _BUCKET
|
||||
_METRIC.fields_by_name['label'].message_type = _LABELPAIR
|
||||
_METRIC.fields_by_name['gauge'].message_type = _GAUGE
|
||||
_METRIC.fields_by_name['counter'].message_type = _COUNTER
|
||||
_METRIC.fields_by_name['summary'].message_type = _SUMMARY
|
||||
_METRIC.fields_by_name['untyped'].message_type = _UNTYPED
|
||||
_METRIC.fields_by_name['histogram'].message_type = _HISTOGRAM
|
||||
_METRICFAMILY.fields_by_name['type'].enum_type = _METRICTYPE
|
||||
_METRICFAMILY.fields_by_name['metric'].message_type = _METRIC
|
||||
DESCRIPTOR.message_types_by_name['LabelPair'] = _LABELPAIR
|
||||
DESCRIPTOR.message_types_by_name['Gauge'] = _GAUGE
|
||||
DESCRIPTOR.message_types_by_name['Counter'] = _COUNTER
|
||||
DESCRIPTOR.message_types_by_name['Quantile'] = _QUANTILE
|
||||
DESCRIPTOR.message_types_by_name['Summary'] = _SUMMARY
|
||||
DESCRIPTOR.message_types_by_name['Untyped'] = _UNTYPED
|
||||
DESCRIPTOR.message_types_by_name['Histogram'] = _HISTOGRAM
|
||||
DESCRIPTOR.message_types_by_name['Bucket'] = _BUCKET
|
||||
DESCRIPTOR.message_types_by_name['Metric'] = _METRIC
|
||||
DESCRIPTOR.message_types_by_name['MetricFamily'] = _METRICFAMILY
|
||||
DESCRIPTOR.enum_types_by_name['MetricType'] = _METRICTYPE
|
||||
|
||||
LabelPair = _reflection.GeneratedProtocolMessageType('LabelPair', (_message.Message,), dict(
|
||||
DESCRIPTOR = _LABELPAIR,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.LabelPair)
|
||||
))
|
||||
_sym_db.RegisterMessage(LabelPair)
|
||||
|
||||
Gauge = _reflection.GeneratedProtocolMessageType('Gauge', (_message.Message,), dict(
|
||||
DESCRIPTOR = _GAUGE,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Gauge)
|
||||
))
|
||||
_sym_db.RegisterMessage(Gauge)
|
||||
|
||||
Counter = _reflection.GeneratedProtocolMessageType('Counter', (_message.Message,), dict(
|
||||
DESCRIPTOR = _COUNTER,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Counter)
|
||||
))
|
||||
_sym_db.RegisterMessage(Counter)
|
||||
|
||||
Quantile = _reflection.GeneratedProtocolMessageType('Quantile', (_message.Message,), dict(
|
||||
DESCRIPTOR = _QUANTILE,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Quantile)
|
||||
))
|
||||
_sym_db.RegisterMessage(Quantile)
|
||||
|
||||
Summary = _reflection.GeneratedProtocolMessageType('Summary', (_message.Message,), dict(
|
||||
DESCRIPTOR = _SUMMARY,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Summary)
|
||||
))
|
||||
_sym_db.RegisterMessage(Summary)
|
||||
|
||||
Untyped = _reflection.GeneratedProtocolMessageType('Untyped', (_message.Message,), dict(
|
||||
DESCRIPTOR = _UNTYPED,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Untyped)
|
||||
))
|
||||
_sym_db.RegisterMessage(Untyped)
|
||||
|
||||
Histogram = _reflection.GeneratedProtocolMessageType('Histogram', (_message.Message,), dict(
|
||||
DESCRIPTOR = _HISTOGRAM,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Histogram)
|
||||
))
|
||||
_sym_db.RegisterMessage(Histogram)
|
||||
|
||||
Bucket = _reflection.GeneratedProtocolMessageType('Bucket', (_message.Message,), dict(
|
||||
DESCRIPTOR = _BUCKET,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Bucket)
|
||||
))
|
||||
_sym_db.RegisterMessage(Bucket)
|
||||
|
||||
Metric = _reflection.GeneratedProtocolMessageType('Metric', (_message.Message,), dict(
|
||||
DESCRIPTOR = _METRIC,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.Metric)
|
||||
))
|
||||
_sym_db.RegisterMessage(Metric)
|
||||
|
||||
MetricFamily = _reflection.GeneratedProtocolMessageType('MetricFamily', (_message.Message,), dict(
|
||||
DESCRIPTOR = _METRICFAMILY,
|
||||
__module__ = 'metrics_pb2'
|
||||
# @@protoc_insertion_point(class_scope:io.prometheus.client.MetricFamily)
|
||||
))
|
||||
_sym_db.RegisterMessage(MetricFamily)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024io.prometheus.client'))
|
||||
# @@protoc_insertion_point(module_scope)
|
5  vendor/github.com/prometheus/client_model/ruby/.gitignore  (generated, vendored, new file)
@@ -0,0 +1,5 @@
*.gem
.bundle
Gemfile.lock
pkg
vendor/bundle
4  vendor/github.com/prometheus/client_model/ruby/Gemfile  (generated, vendored, new file)
@@ -0,0 +1,4 @@
source 'https://rubygems.org'

# Specify your gem's dependencies in prometheus-client-model.gemspec
gemspec
201  vendor/github.com/prometheus/client_model/ruby/LICENSE  (generated, vendored, new file)
@@ -0,0 +1,201 @@
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
17  vendor/github.com/prometheus/client_model/ruby/Makefile  (generated, vendored, new file)
@@ -0,0 +1,17 @@
VENDOR_BUNDLE = vendor/bundle

build: $(VENDOR_BUNDLE)/.bundled
	BEEFCAKE_NAMESPACE=Prometheus::Client protoc --beefcake_out lib/prometheus/client/model -I .. ../metrics.proto

$(VENDOR_BUNDLE):
	mkdir -p $@

$(VENDOR_BUNDLE)/.bundled: $(VENDOR_BUNDLE) Gemfile
	bundle install --quiet --path $<
	@touch $@

clean:
	-rm -f lib/prometheus/client/model/metrics.pb.rb
	-rm -rf $(VENDOR_BUNDLE)

.PHONY: build clean
31  vendor/github.com/prometheus/client_model/ruby/README.md  (generated, vendored, new file)
@@ -0,0 +1,31 @@
# Prometheus Ruby client model

Data model artifacts for the [Prometheus Ruby client][1].

## Installation

    gem install prometheus-client-model

## Usage

Build the artifacts from the protobuf specification:

    make build

While this Gem's main purpose is to define the Prometheus data types for the
[client][1], it's possible to use it without the client to decode a stream of
delimited protobuf messages:

```ruby
require 'open-uri'
require 'prometheus/client/model'

CONTENT_TYPE = 'application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited'

stream = open('http://localhost:9090/metrics', 'Accept' => CONTENT_TYPE).read
while family = Prometheus::Client::MetricFamily.read_delimited(stream)
  puts family
end
```

[1]: https://github.com/prometheus/client_ruby
1  vendor/github.com/prometheus/client_model/ruby/Rakefile  (generated, vendored, new file)
@@ -0,0 +1 @@
require "bundler/gem_tasks"
2  vendor/github.com/prometheus/client_model/ruby/lib/prometheus/client/model.rb  (generated, vendored, new file)
@@ -0,0 +1,2 @@
require 'prometheus/client/model/metrics.pb'
require 'prometheus/client/model/version'
111  vendor/github.com/prometheus/client_model/ruby/lib/prometheus/client/model/metrics.pb.rb  (generated, vendored, new file)
@@ -0,0 +1,111 @@
## Generated from metrics.proto for io.prometheus.client
require "beefcake"

module Prometheus
  module Client

    module MetricType
      COUNTER = 0
      GAUGE = 1
      SUMMARY = 2
      UNTYPED = 3
      HISTOGRAM = 4
    end

    class LabelPair
      include Beefcake::Message
    end

    class Gauge
      include Beefcake::Message
    end

    class Counter
      include Beefcake::Message
    end

    class Quantile
      include Beefcake::Message
    end

    class Summary
      include Beefcake::Message
    end

    class Untyped
      include Beefcake::Message
    end

    class Histogram
      include Beefcake::Message
    end

    class Bucket
      include Beefcake::Message
    end

    class Metric
      include Beefcake::Message
    end

    class MetricFamily
      include Beefcake::Message
    end

    class LabelPair
      optional :name, :string, 1
      optional :value, :string, 2
    end

    class Gauge
      optional :value, :double, 1
    end

    class Counter
      optional :value, :double, 1
    end

    class Quantile
      optional :quantile, :double, 1
      optional :value, :double, 2
    end

    class Summary
      optional :sample_count, :uint64, 1
      optional :sample_sum, :double, 2
      repeated :quantile, Quantile, 3
    end

    class Untyped
      optional :value, :double, 1
    end

    class Histogram
      optional :sample_count, :uint64, 1
      optional :sample_sum, :double, 2
      repeated :bucket, Bucket, 3
    end

    class Bucket
      optional :cumulative_count, :uint64, 1
      optional :upper_bound, :double, 2
    end

    class Metric
      repeated :label, LabelPair, 1
      optional :gauge, Gauge, 2
      optional :counter, Counter, 3
      optional :summary, Summary, 4
      optional :untyped, Untyped, 5
      optional :histogram, Histogram, 7
      optional :timestamp_ms, :int64, 6
    end

    class MetricFamily
      optional :name, :string, 1
      optional :help, :string, 2
      optional :type, MetricType, 3
      repeated :metric, Metric, 4
    end
  end
end
7  vendor/github.com/prometheus/client_model/ruby/lib/prometheus/client/model/version.rb  (generated, vendored, new file)
@@ -0,0 +1,7 @@
module Prometheus
  module Client
    module Model
      VERSION = '0.1.0'
    end
  end
end
22  vendor/github.com/prometheus/client_model/ruby/prometheus-client-model.gemspec  (generated, vendored, new file)
@@ -0,0 +1,22 @@
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'prometheus/client/model/version'

Gem::Specification.new do |spec|
  spec.name          = 'prometheus-client-model'
  spec.version       = Prometheus::Client::Model::VERSION
  spec.authors       = ['Tobias Schmidt']
  spec.email         = ['tobidt@gmail.com']
  spec.summary       = 'Data model artifacts for the Prometheus Ruby client'
  spec.homepage      = 'https://github.com/prometheus/client_model/tree/master/ruby'
  spec.license       = 'Apache 2.0'

  spec.files         = %w[README.md LICENSE] + Dir.glob('{lib/**/*}')
  spec.require_paths = ['lib']

  spec.add_dependency 'beefcake', '>= 0.4.0'

  spec.add_development_dependency 'bundler', '~> 1.3'
  spec.add_development_dependency 'rake'
end
7683  vendor/github.com/prometheus/client_model/src/main/java/io/prometheus/client/Metrics.java  (generated, vendored, new file)
File diff suppressed because it is too large.
34  vendor/github.com/prometheus/common/config/config.go  (generated, vendored, new file)
@@ -0,0 +1,34 @@
// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This package no longer handles safe yaml parsing. In order to
// ensure correct yaml unmarshalling, use "yaml.UnmarshalStrict()".

package config

// Secret special type for storing secrets.
type Secret string

// MarshalYAML implements the yaml.Marshaler interface for Secrets.
func (s Secret) MarshalYAML() (interface{}, error) {
	if s != "" {
		return "<secret>", nil
	}
	return nil, nil
}

//UnmarshalYAML implements the yaml.Unmarshaler interface for Secrets.
func (s *Secret) UnmarshalYAML(unmarshal func(interface{}) error) error {
	type plain Secret
	return unmarshal((*plain)(s))
}
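The Secret type above is what keeps passwords and bearer tokens out of marshalled output. A minimal sketch of that behaviour, assuming the gopkg.in/yaml.v2 package this config package already uses; the struct and password value are illustrative:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/config"
	"gopkg.in/yaml.v2"
)

func main() {
	// Wrap an illustrative password in config.Secret.
	cfg := struct {
		Password config.Secret `yaml:"password"`
	}{Password: "hunter2"}

	out, err := yaml.Marshal(cfg)
	if err != nil {
		panic(err)
	}
	// Prints "password: <secret>" -- the real value never reaches the output.
	fmt.Print(string(out))
}
```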
320
vendor/github.com/prometheus/common/config/http_config.go
generated
vendored
Normal file
320
vendor/github.com/prometheus/common/config/http_config.go
generated
vendored
Normal file
|
@ -0,0 +1,320 @@
|
|||
// Copyright 2016 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/mwitkow/go-conntrack"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// BasicAuth contains basic HTTP authentication credentials.
|
||||
type BasicAuth struct {
|
||||
Username string `yaml:"username"`
|
||||
Password Secret `yaml:"password,omitempty"`
|
||||
PasswordFile string `yaml:"password_file,omitempty"`
|
||||
}
|
||||
|
||||
// URL is a custom URL type that allows validation at configuration load time.
|
||||
type URL struct {
|
||||
*url.URL
|
||||
}
|
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaler interface for URLs.
|
||||
func (u *URL) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
var s string
|
||||
if err := unmarshal(&s); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
urlp, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
u.URL = urlp
|
||||
return nil
|
||||
}
|
||||
|
||||
// MarshalYAML implements the yaml.Marshaler interface for URLs.
|
||||
func (u URL) MarshalYAML() (interface{}, error) {
|
||||
if u.URL != nil {
|
||||
return u.String(), nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// HTTPClientConfig configures an HTTP client.
|
||||
type HTTPClientConfig struct {
|
||||
// The HTTP basic authentication credentials for the targets.
|
||||
BasicAuth *BasicAuth `yaml:"basic_auth,omitempty"`
|
||||
// The bearer token for the targets.
|
||||
BearerToken Secret `yaml:"bearer_token,omitempty"`
|
||||
// The bearer token file for the targets.
|
||||
BearerTokenFile string `yaml:"bearer_token_file,omitempty"`
|
||||
// HTTP proxy server to use to connect to the targets.
|
||||
ProxyURL URL `yaml:"proxy_url,omitempty"`
|
||||
// TLSConfig to use to connect to the targets.
|
||||
TLSConfig TLSConfig `yaml:"tls_config,omitempty"`
|
||||
}
|
||||
|
||||
// Validate validates the HTTPClientConfig to check only one of BearerToken,
|
||||
// BasicAuth and BearerTokenFile is configured.
|
||||
func (c *HTTPClientConfig) Validate() error {
|
||||
if len(c.BearerToken) > 0 && len(c.BearerTokenFile) > 0 {
|
||||
return fmt.Errorf("at most one of bearer_token & bearer_token_file must be configured")
|
||||
}
|
||||
if c.BasicAuth != nil && (len(c.BearerToken) > 0 || len(c.BearerTokenFile) > 0) {
|
||||
return fmt.Errorf("at most one of basic_auth, bearer_token & bearer_token_file must be configured")
|
||||
}
|
||||
if c.BasicAuth != nil && c.BasicAuth.Username == "" {
|
||||
return fmt.Errorf("basic_auth requires a username")
|
||||
}
|
||||
if c.BasicAuth != nil && (string(c.BasicAuth.Password) != "" && c.BasicAuth.PasswordFile != "") {
|
||||
return fmt.Errorf("at most one of basic_auth password & password_file must be configured")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaler interface
|
||||
func (c *HTTPClientConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
type plain HTTPClientConfig
|
||||
if err := unmarshal((*plain)(c)); err != nil {
|
||||
return err
|
||||
}
|
||||
return c.Validate()
|
||||
}
|
||||
|
||||
// UnmarshalYAML implements the yaml.Unmarshaler interface.
|
||||
func (a *BasicAuth) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
type plain BasicAuth
|
||||
return unmarshal((*plain)(a))
|
||||
}
|
||||
|
||||
// NewClient returns a http.Client using the specified http.RoundTripper.
|
||||
func newClient(rt http.RoundTripper) *http.Client {
|
||||
return &http.Client{Transport: rt}
|
||||
}
|
||||
|
||||
// NewClientFromConfig returns a new HTTP client configured for the
|
||||
// given config.HTTPClientConfig. The name is used as go-conntrack metric label.
|
||||
func NewClientFromConfig(cfg HTTPClientConfig, name string) (*http.Client, error) {
|
||||
rt, err := NewRoundTripperFromConfig(cfg, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newClient(rt), nil
|
||||
}
|
||||
|
||||
// NewRoundTripperFromConfig returns a new HTTP RoundTripper configured for the
|
||||
// given config.HTTPClientConfig. The name is used as go-conntrack metric label.
|
||||
func NewRoundTripperFromConfig(cfg HTTPClientConfig, name string) (http.RoundTripper, error) {
|
||||
tlsConfig, err := NewTLSConfig(&cfg.TLSConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// The only timeout we care about is the configured scrape timeout.
|
||||
// It is applied on request. So we leave out any timings here.
|
||||
var rt http.RoundTripper = &http.Transport{
|
||||
Proxy: http.ProxyURL(cfg.ProxyURL.URL),
|
||||
MaxIdleConns: 20000,
|
||||
MaxIdleConnsPerHost: 1000, // see https://github.com/golang/go/issues/13801
|
||||
DisableKeepAlives: false,
|
||||
TLSClientConfig: tlsConfig,
|
||||
DisableCompression: true,
|
||||
// 5 minutes is typically above the maximum sane scrape interval. So we can
|
||||
// use keepalive for all configurations.
|
||||
IdleConnTimeout: 5 * time.Minute,
|
||||
DialContext: conntrack.NewDialContextFunc(
|
||||
conntrack.DialWithTracing(),
|
||||
conntrack.DialWithName(name),
|
||||
),
|
||||
}
|
||||
|
||||
    // If a bearer token is provided, create a round tripper that will set the
    // Authorization header correctly on each request.
    if len(cfg.BearerToken) > 0 {
        rt = NewBearerAuthRoundTripper(cfg.BearerToken, rt)
    } else if len(cfg.BearerTokenFile) > 0 {
        rt = NewBearerAuthFileRoundTripper(cfg.BearerTokenFile, rt)
    }

    if cfg.BasicAuth != nil {
        rt = NewBasicAuthRoundTripper(cfg.BasicAuth.Username, cfg.BasicAuth.Password, cfg.BasicAuth.PasswordFile, rt)
    }

    // Return a new configured RoundTripper.
    return rt, nil
}

type bearerAuthRoundTripper struct {
    bearerToken Secret
    rt          http.RoundTripper
}

// NewBearerAuthRoundTripper adds the provided bearer token to a request unless the authorization
// header has already been set.
func NewBearerAuthRoundTripper(token Secret, rt http.RoundTripper) http.RoundTripper {
    return &bearerAuthRoundTripper{token, rt}
}

func (rt *bearerAuthRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
    if len(req.Header.Get("Authorization")) == 0 {
        req = cloneRequest(req)
        req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", string(rt.bearerToken)))
    }
    return rt.rt.RoundTrip(req)
}

type bearerAuthFileRoundTripper struct {
    bearerFile string
    rt         http.RoundTripper
}

// NewBearerAuthFileRoundTripper adds the bearer token read from the provided file to a request unless
// the authorization header has already been set. This file is read for every request.
func NewBearerAuthFileRoundTripper(bearerFile string, rt http.RoundTripper) http.RoundTripper {
    return &bearerAuthFileRoundTripper{bearerFile, rt}
}

func (rt *bearerAuthFileRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
    if len(req.Header.Get("Authorization")) == 0 {
        b, err := ioutil.ReadFile(rt.bearerFile)
        if err != nil {
            return nil, fmt.Errorf("unable to read bearer token file %s: %s", rt.bearerFile, err)
        }
        bearerToken := strings.TrimSpace(string(b))

        req = cloneRequest(req)
        req.Header.Set("Authorization", "Bearer "+bearerToken)
    }

    return rt.rt.RoundTrip(req)
}

type basicAuthRoundTripper struct {
    username     string
    password     Secret
    passwordFile string
    rt           http.RoundTripper
}

// NewBasicAuthRoundTripper will apply a BASIC auth authorization header to a request unless it has
// already been set.
func NewBasicAuthRoundTripper(username string, password Secret, passwordFile string, rt http.RoundTripper) http.RoundTripper {
    return &basicAuthRoundTripper{username, password, passwordFile, rt}
}

func (rt *basicAuthRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
    if len(req.Header.Get("Authorization")) != 0 {
        return rt.rt.RoundTrip(req)
    }
    req = cloneRequest(req)
    if rt.passwordFile != "" {
        bs, err := ioutil.ReadFile(rt.passwordFile)
        if err != nil {
            return nil, fmt.Errorf("unable to read basic auth password file %s: %s", rt.passwordFile, err)
        }
        req.SetBasicAuth(rt.username, strings.TrimSpace(string(bs)))
    } else {
        req.SetBasicAuth(rt.username, strings.TrimSpace(string(rt.password)))
    }
    return rt.rt.RoundTrip(req)
}

// cloneRequest returns a clone of the provided *http.Request.
// The clone is a shallow copy of the struct and its Header map.
func cloneRequest(r *http.Request) *http.Request {
    // Shallow copy of the struct.
    r2 := new(http.Request)
    *r2 = *r
    // Deep copy of the Header.
    r2.Header = make(http.Header)
    for k, s := range r.Header {
        r2.Header[k] = s
    }
    return r2
}

// NewTLSConfig creates a new tls.Config from the given TLSConfig.
func NewTLSConfig(cfg *TLSConfig) (*tls.Config, error) {
    tlsConfig := &tls.Config{InsecureSkipVerify: cfg.InsecureSkipVerify}

    // If a CA cert is provided then let's read it in so we can validate the
    // scrape target's certificate properly.
    if len(cfg.CAFile) > 0 {
        caCertPool := x509.NewCertPool()
        // Load CA cert.
        caCert, err := ioutil.ReadFile(cfg.CAFile)
        if err != nil {
            return nil, fmt.Errorf("unable to use specified CA cert %s: %s", cfg.CAFile, err)
        }
        caCertPool.AppendCertsFromPEM(caCert)
        tlsConfig.RootCAs = caCertPool
    }

    if len(cfg.ServerName) > 0 {
        tlsConfig.ServerName = cfg.ServerName
    }
    // If a client cert & key is provided then configure TLS config accordingly.
    if len(cfg.CertFile) > 0 && len(cfg.KeyFile) == 0 {
        return nil, fmt.Errorf("client cert file %q specified without client key file", cfg.CertFile)
    } else if len(cfg.KeyFile) > 0 && len(cfg.CertFile) == 0 {
        return nil, fmt.Errorf("client key file %q specified without client cert file", cfg.KeyFile)
    } else if len(cfg.CertFile) > 0 && len(cfg.KeyFile) > 0 {
        cert, err := tls.LoadX509KeyPair(cfg.CertFile, cfg.KeyFile)
        if err != nil {
            return nil, fmt.Errorf("unable to use specified client cert (%s) & key (%s): %s", cfg.CertFile, cfg.KeyFile, err)
        }
        tlsConfig.Certificates = []tls.Certificate{cert}
    }
    tlsConfig.BuildNameToCertificate()

    return tlsConfig, nil
}

// TLSConfig configures the options for TLS connections.
type TLSConfig struct {
    // The CA cert to use for the targets.
    CAFile string `yaml:"ca_file,omitempty"`
    // The client cert file for the targets.
    CertFile string `yaml:"cert_file,omitempty"`
    // The client key file for the targets.
    KeyFile string `yaml:"key_file,omitempty"`
    // Used to verify the hostname for the targets.
    ServerName string `yaml:"server_name,omitempty"`
    // Disable target certificate validation.
    InsecureSkipVerify bool `yaml:"insecure_skip_verify"`
}

// UnmarshalYAML implements the yaml.Unmarshaler interface.
func (c *TLSConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
    type plain TLSConfig
    return unmarshal((*plain)(c))
}

func (c HTTPClientConfig) String() string {
    b, err := yaml.Marshal(c)
    if err != nil {
        return fmt.Sprintf("<error creating http client config string: %s>", err)
    }
    return string(b)
}
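For orientation, the sketch below shows how a caller outside this package might wire these round trippers and TLS options together through HTTPClientConfig and NewClientFromConfig, both defined earlier in this file; the call shape matches what the test file below uses. The file paths, server name, and URL are placeholders, not values taken from this commit. Note that bearer-token files and basic-auth password files are re-read on every request, so rotated credentials are picked up without rebuilding the client.

package main

import (
    "fmt"
    "io/ioutil"
    "log"

    "github.com/prometheus/common/config"
)

func main() {
    // Placeholder paths and host; substitute real credentials and certificates.
    cfg := config.HTTPClientConfig{
        BearerTokenFile: "/etc/prometheus/token",
        TLSConfig: config.TLSConfig{
            CAFile:     "/etc/prometheus/ca.pem",
            ServerName: "example.internal",
        },
    }
    if err := cfg.Validate(); err != nil {
        log.Fatal(err)
    }
    // The second argument is a name for the client, as in the tests below.
    client, err := config.NewClientFromConfig(cfg, "example")
    if err != nil {
        log.Fatal(err)
    }
    resp, err := client.Get("https://example.internal/metrics")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    body, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(body))
}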
592
vendor/github.com/prometheus/common/config/http_config_test.go
generated
vendored
Normal file
|
@ -0,0 +1,592 @@
|
|||
// Copyright 2015 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
TLSCAChainPath = "testdata/tls-ca-chain.pem"
|
||||
ServerCertificatePath = "testdata/server.crt"
|
||||
ServerKeyPath = "testdata/server.key"
|
||||
BarneyCertificatePath = "testdata/barney.crt"
|
||||
BarneyKeyNoPassPath = "testdata/barney-no-pass.key"
|
||||
MissingCA = "missing/ca.crt"
|
||||
MissingCert = "missing/cert.crt"
|
||||
MissingKey = "missing/secret.key"
|
||||
|
||||
ExpectedMessage = "I'm here to serve you!!!"
|
||||
BearerToken = "theanswertothegreatquestionoflifetheuniverseandeverythingisfortytwo"
|
||||
BearerTokenFile = "testdata/bearer.token"
|
||||
MissingBearerTokenFile = "missing/bearer.token"
|
||||
ExpectedBearer = "Bearer " + BearerToken
|
||||
ExpectedUsername = "arthurdent"
|
||||
ExpectedPassword = "42"
|
||||
)
|
||||
|
||||
var invalidHTTPClientConfigs = []struct {
|
||||
httpClientConfigFile string
|
||||
errMsg string
|
||||
}{
|
||||
{
|
||||
httpClientConfigFile: "testdata/http.conf.bearer-token-and-file-set.bad.yml",
|
||||
errMsg: "at most one of bearer_token & bearer_token_file must be configured",
|
||||
},
|
||||
{
|
||||
httpClientConfigFile: "testdata/http.conf.empty.bad.yml",
|
||||
errMsg: "at most one of basic_auth, bearer_token & bearer_token_file must be configured",
|
||||
},
|
||||
{
|
||||
httpClientConfigFile: "testdata/http.conf.basic-auth.too-much.bad.yaml",
|
||||
errMsg: "at most one of basic_auth password & password_file must be configured",
|
||||
},
|
||||
}
|
||||
|
||||
func newTestServer(handler func(w http.ResponseWriter, r *http.Request)) (*httptest.Server, error) {
|
||||
testServer := httptest.NewUnstartedServer(http.HandlerFunc(handler))
|
||||
|
||||
tlsCAChain, err := ioutil.ReadFile(TLSCAChainPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Can't read %s", TLSCAChainPath)
|
||||
}
|
||||
serverCertificate, err := tls.LoadX509KeyPair(ServerCertificatePath, ServerKeyPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Can't load X509 key pair %s - %s", ServerCertificatePath, ServerKeyPath)
|
||||
}
|
||||
|
||||
rootCAs := x509.NewCertPool()
|
||||
rootCAs.AppendCertsFromPEM(tlsCAChain)
|
||||
|
||||
testServer.TLS = &tls.Config{
|
||||
Certificates: make([]tls.Certificate, 1),
|
||||
RootCAs: rootCAs,
|
||||
ClientAuth: tls.RequireAndVerifyClientCert,
|
||||
ClientCAs: rootCAs}
|
||||
testServer.TLS.Certificates[0] = serverCertificate
|
||||
testServer.TLS.BuildNameToCertificate()
|
||||
|
||||
testServer.StartTLS()
|
||||
|
||||
return testServer, nil
|
||||
}
|
||||
|
||||
func TestNewClientFromConfig(t *testing.T) {
|
||||
var newClientValidConfig = []struct {
|
||||
clientConfig HTTPClientConfig
|
||||
handler func(w http.ResponseWriter, r *http.Request)
|
||||
}{
|
||||
{
|
||||
clientConfig: HTTPClientConfig{
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: "",
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: true},
|
||||
},
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
},
|
||||
}, {
|
||||
clientConfig: HTTPClientConfig{
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
},
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
},
|
||||
}, {
|
||||
clientConfig: HTTPClientConfig{
|
||||
BearerToken: BearerToken,
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
},
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
bearer := r.Header.Get("Authorization")
|
||||
if bearer != ExpectedBearer {
|
||||
fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)",
|
||||
ExpectedBearer, bearer)
|
||||
} else {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
}
|
||||
},
|
||||
}, {
|
||||
clientConfig: HTTPClientConfig{
|
||||
BearerTokenFile: BearerTokenFile,
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
},
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
bearer := r.Header.Get("Authorization")
|
||||
if bearer != ExpectedBearer {
|
||||
fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)",
|
||||
ExpectedBearer, bearer)
|
||||
} else {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
}
|
||||
},
|
||||
}, {
|
||||
clientConfig: HTTPClientConfig{
|
||||
BasicAuth: &BasicAuth{
|
||||
Username: ExpectedUsername,
|
||||
Password: ExpectedPassword,
|
||||
},
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
},
|
||||
handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
username, password, ok := r.BasicAuth()
|
||||
if !ok {
|
||||
fmt.Fprintf(w, "The Authorization header wasn't set")
|
||||
} else if ExpectedUsername != username {
|
||||
fmt.Fprintf(w, "The expected username (%s) differs from the obtained username (%s).", ExpectedUsername, username)
|
||||
} else if ExpectedPassword != password {
|
||||
fmt.Fprintf(w, "The expected password (%s) differs from the obtained password (%s).", ExpectedPassword, password)
|
||||
} else {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, validConfig := range newClientValidConfig {
|
||||
testServer, err := newTestServer(validConfig.handler)
|
||||
if err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
defer testServer.Close()
|
||||
|
||||
client, err := NewClientFromConfig(validConfig.clientConfig, "test")
|
||||
if err != nil {
|
||||
t.Errorf("Can't create a client from this config: %+v", validConfig.clientConfig)
|
||||
continue
|
||||
}
|
||||
response, err := client.Get(testServer.URL)
|
||||
if err != nil {
|
||||
t.Errorf("Can't connect to the test server using this config: %+v", validConfig.clientConfig)
|
||||
continue
|
||||
}
|
||||
|
||||
message, err := ioutil.ReadAll(response.Body)
|
||||
response.Body.Close()
|
||||
if err != nil {
|
||||
t.Errorf("Can't read the server response body using this config: %+v", validConfig.clientConfig)
|
||||
continue
|
||||
}
|
||||
|
||||
trimMessage := strings.TrimSpace(string(message))
|
||||
if ExpectedMessage != trimMessage {
|
||||
t.Errorf("The expected message (%s) differs from the obtained message (%s) using this config: %+v",
|
||||
ExpectedMessage, trimMessage, validConfig.clientConfig)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNewClientFromInvalidConfig(t *testing.T) {
|
||||
var newClientInvalidConfig = []struct {
|
||||
clientConfig HTTPClientConfig
|
||||
errorMsg string
|
||||
}{
|
||||
{
|
||||
clientConfig: HTTPClientConfig{
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: MissingCA,
|
||||
CertFile: "",
|
||||
KeyFile: "",
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: true},
|
||||
},
|
||||
errorMsg: fmt.Sprintf("unable to use specified CA cert %s:", MissingCA),
|
||||
},
|
||||
}
|
||||
|
||||
for _, invalidConfig := range newClientInvalidConfig {
|
||||
client, err := NewClientFromConfig(invalidConfig.clientConfig, "test")
|
||||
if client != nil {
|
||||
t.Errorf("A client instance was returned instead of nil using this config: %+v", invalidConfig.clientConfig)
|
||||
}
|
||||
if err == nil {
|
||||
t.Errorf("No error was returned using this config: %+v", invalidConfig.clientConfig)
|
||||
}
|
||||
if !strings.Contains(err.Error(), invalidConfig.errorMsg) {
|
||||
t.Errorf("Expected error %s does not contain %s", err.Error(), invalidConfig.errorMsg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMissingBearerAuthFile(t *testing.T) {
|
||||
cfg := HTTPClientConfig{
|
||||
BearerTokenFile: MissingBearerTokenFile,
|
||||
TLSConfig: TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
}
|
||||
handler := func(w http.ResponseWriter, r *http.Request) {
|
||||
bearer := r.Header.Get("Authorization")
|
||||
if bearer != ExpectedBearer {
|
||||
fmt.Fprintf(w, "The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)",
|
||||
ExpectedBearer, bearer)
|
||||
} else {
|
||||
fmt.Fprint(w, ExpectedMessage)
|
||||
}
|
||||
}
|
||||
|
||||
testServer, err := newTestServer(handler)
|
||||
if err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
defer testServer.Close()
|
||||
|
||||
client, err := NewClientFromConfig(cfg, "test")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
_, err = client.Get(testServer.URL)
|
||||
if err == nil {
|
||||
t.Fatal("No error is returned here")
|
||||
}
|
||||
|
||||
if !strings.Contains(err.Error(), "unable to read bearer token file missing/bearer.token: open missing/bearer.token: no such file or directory") {
|
||||
t.Fatal("wrong error message being returned")
|
||||
}
|
||||
}
|
||||
|
||||
func TestBearerAuthRoundTripper(t *testing.T) {
|
||||
const (
|
||||
newBearerToken = "goodbyeandthankyouforthefish"
|
||||
)
|
||||
|
||||
fakeRoundTripper := NewRoundTripCheckRequest(func(req *http.Request) {
|
||||
bearer := req.Header.Get("Authorization")
|
||||
if bearer != ExpectedBearer {
|
||||
t.Errorf("The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)",
|
||||
ExpectedBearer, bearer)
|
||||
}
|
||||
}, nil, nil)
|
||||
|
||||
// Normal flow.
|
||||
bearerAuthRoundTripper := NewBearerAuthRoundTripper(BearerToken, fakeRoundTripper)
|
||||
request, _ := http.NewRequest("GET", "/hitchhiker", nil)
|
||||
request.Header.Set("User-Agent", "Douglas Adams mind")
|
||||
bearerAuthRoundTripper.RoundTrip(request)
|
||||
|
||||
// Should honor already Authorization header set.
|
||||
bearerAuthRoundTripperShouldNotModifyExistingAuthorization := NewBearerAuthRoundTripper(newBearerToken, fakeRoundTripper)
|
||||
request, _ = http.NewRequest("GET", "/hitchhiker", nil)
|
||||
request.Header.Set("Authorization", ExpectedBearer)
|
||||
bearerAuthRoundTripperShouldNotModifyExistingAuthorization.RoundTrip(request)
|
||||
}
|
||||
|
||||
func TestBearerAuthFileRoundTripper(t *testing.T) {
|
||||
const (
|
||||
newBearerToken = "goodbyeandthankyouforthefish"
|
||||
)
|
||||
|
||||
fakeRoundTripper := NewRoundTripCheckRequest(func(req *http.Request) {
|
||||
bearer := req.Header.Get("Authorization")
|
||||
if bearer != ExpectedBearer {
|
||||
t.Errorf("The expected Bearer Authorization (%s) differs from the obtained Bearer Authorization (%s)",
|
||||
ExpectedBearer, bearer)
|
||||
}
|
||||
}, nil, nil)
|
||||
|
||||
// Normal flow.
|
||||
bearerAuthRoundTripper := NewBearerAuthFileRoundTripper(BearerTokenFile, fakeRoundTripper)
|
||||
request, _ := http.NewRequest("GET", "/hitchhiker", nil)
|
||||
request.Header.Set("User-Agent", "Douglas Adams mind")
|
||||
bearerAuthRoundTripper.RoundTrip(request)
|
||||
|
||||
// Should honor already Authorization header set.
|
||||
bearerAuthRoundTripperShouldNotModifyExistingAuthorization := NewBearerAuthFileRoundTripper(MissingBearerTokenFile, fakeRoundTripper)
|
||||
request, _ = http.NewRequest("GET", "/hitchhiker", nil)
|
||||
request.Header.Set("Authorization", ExpectedBearer)
|
||||
bearerAuthRoundTripperShouldNotModifyExistingAuthorization.RoundTrip(request)
|
||||
}
|
||||
|
||||
func TestTLSConfig(t *testing.T) {
|
||||
configTLSConfig := TLSConfig{
|
||||
CAFile: TLSCAChainPath,
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "localhost",
|
||||
InsecureSkipVerify: false}
|
||||
|
||||
tlsCAChain, err := ioutil.ReadFile(TLSCAChainPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Can't read the CA certificate chain (%s)",
|
||||
TLSCAChainPath)
|
||||
}
|
||||
rootCAs := x509.NewCertPool()
|
||||
rootCAs.AppendCertsFromPEM(tlsCAChain)
|
||||
|
||||
barneyCertificate, err := tls.LoadX509KeyPair(BarneyCertificatePath, BarneyKeyNoPassPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Can't load the client key pair ('%s' and '%s'). Reason: %s",
|
||||
BarneyCertificatePath, BarneyKeyNoPassPath, err)
|
||||
}
|
||||
|
||||
expectedTLSConfig := &tls.Config{
|
||||
RootCAs: rootCAs,
|
||||
Certificates: []tls.Certificate{barneyCertificate},
|
||||
ServerName: configTLSConfig.ServerName,
|
||||
InsecureSkipVerify: configTLSConfig.InsecureSkipVerify}
|
||||
expectedTLSConfig.BuildNameToCertificate()
|
||||
|
||||
tlsConfig, err := NewTLSConfig(&configTLSConfig)
|
||||
if err != nil {
|
||||
t.Fatalf("Can't create a new TLS Config from a configuration (%s).", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(tlsConfig, expectedTLSConfig) {
|
||||
t.Fatalf("Unexpected TLS Config result: \n\n%+v\n expected\n\n%+v", tlsConfig, expectedTLSConfig)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTLSConfigEmpty(t *testing.T) {
|
||||
configTLSConfig := TLSConfig{
|
||||
CAFile: "",
|
||||
CertFile: "",
|
||||
KeyFile: "",
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: true}
|
||||
|
||||
expectedTLSConfig := &tls.Config{
|
||||
InsecureSkipVerify: configTLSConfig.InsecureSkipVerify}
|
||||
expectedTLSConfig.BuildNameToCertificate()
|
||||
|
||||
tlsConfig, err := NewTLSConfig(&configTLSConfig)
|
||||
if err != nil {
|
||||
t.Fatalf("Can't create a new TLS Config from a configuration (%s).", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(tlsConfig, expectedTLSConfig) {
|
||||
t.Fatalf("Unexpected TLS Config result: \n\n%+v\n expected\n\n%+v", tlsConfig, expectedTLSConfig)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTLSConfigInvalidCA(t *testing.T) {
|
||||
var invalidTLSConfig = []struct {
|
||||
configTLSConfig TLSConfig
|
||||
errorMessage string
|
||||
}{
|
||||
{
|
||||
configTLSConfig: TLSConfig{
|
||||
CAFile: MissingCA,
|
||||
CertFile: "",
|
||||
KeyFile: "",
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
errorMessage: fmt.Sprintf("unable to use specified CA cert %s:", MissingCA),
|
||||
}, {
|
||||
configTLSConfig: TLSConfig{
|
||||
CAFile: "",
|
||||
CertFile: MissingCert,
|
||||
KeyFile: BarneyKeyNoPassPath,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
errorMessage: fmt.Sprintf("unable to use specified client cert (%s) & key (%s):", MissingCert, BarneyKeyNoPassPath),
|
||||
}, {
|
||||
configTLSConfig: TLSConfig{
|
||||
CAFile: "",
|
||||
CertFile: BarneyCertificatePath,
|
||||
KeyFile: MissingKey,
|
||||
ServerName: "",
|
||||
InsecureSkipVerify: false},
|
||||
errorMessage: fmt.Sprintf("unable to use specified client cert (%s) & key (%s):", BarneyCertificatePath, MissingKey),
|
||||
},
|
||||
}
|
||||
|
||||
for _, anInvalididTLSConfig := range invalidTLSConfig {
|
||||
tlsConfig, err := NewTLSConfig(&anInvalididTLSConfig.configTLSConfig)
|
||||
if tlsConfig != nil && err == nil {
|
||||
t.Errorf("The TLS Config could be created even with this %+v", anInvalididTLSConfig.configTLSConfig)
|
||||
continue
|
||||
}
|
||||
if !strings.Contains(err.Error(), anInvalididTLSConfig.errorMessage) {
|
||||
t.Errorf("The expected error should contain %s, but got %s", anInvalididTLSConfig.errorMessage, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestBasicAuthNoPassword(t *testing.T) {
|
||||
cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.basic-auth.no-password.yaml")
|
||||
if err != nil {
|
||||
t.Errorf("Error loading HTTP client config: %v", err)
|
||||
}
|
||||
client, err := NewClientFromConfig(*cfg, "test")
|
||||
if err != nil {
|
||||
t.Errorf("Error creating HTTP Client: %v", err)
|
||||
}
|
||||
|
||||
rt, ok := client.Transport.(*basicAuthRoundTripper)
|
||||
if !ok {
|
||||
t.Fatalf("Error casting to basic auth transport, %v", client.Transport)
|
||||
}
|
||||
|
||||
if rt.username != "user" {
|
||||
t.Errorf("Bad HTTP client username: %s", rt.username)
|
||||
}
|
||||
if string(rt.password) != "" {
|
||||
t.Errorf("Expected empty HTTP client password: %s", rt.password)
|
||||
}
|
||||
if string(rt.passwordFile) != "" {
|
||||
t.Errorf("Expected empty HTTP client passwordFile: %s", rt.passwordFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBasicAuthPasswordFile(t *testing.T) {
|
||||
cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.basic-auth.good.yaml")
|
||||
if err != nil {
|
||||
t.Errorf("Error loading HTTP client config: %v", err)
|
||||
}
|
||||
client, err := NewClientFromConfig(*cfg, "test")
|
||||
if err != nil {
|
||||
t.Errorf("Error creating HTTP Client: %v", err)
|
||||
}
|
||||
|
||||
rt, ok := client.Transport.(*basicAuthRoundTripper)
|
||||
if !ok {
|
||||
t.Errorf("Error casting to basic auth transport, %v", client.Transport)
|
||||
}
|
||||
|
||||
if rt.username != "user" {
|
||||
t.Errorf("Bad HTTP client username: %s", rt.username)
|
||||
}
|
||||
if string(rt.password) != "" {
|
||||
t.Errorf("Bad HTTP client password: %s", rt.password)
|
||||
}
|
||||
if string(rt.passwordFile) != "testdata/basic-auth-password" {
|
||||
t.Errorf("Bad HTTP client passwordFile: %s", rt.passwordFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHideHTTPClientConfigSecrets(t *testing.T) {
|
||||
c, _, err := LoadHTTPConfigFile("testdata/http.conf.good.yml")
|
||||
if err != nil {
|
||||
t.Errorf("Error parsing %s: %s", "testdata/http.conf.good.yml", err)
|
||||
}
|
||||
|
||||
// String method must not reveal authentication credentials.
|
||||
s := c.String()
|
||||
if strings.Contains(s, "mysecret") {
|
||||
t.Fatal("http client config's String method reveals authentication credentials.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateHTTPConfig(t *testing.T) {
|
||||
cfg, _, err := LoadHTTPConfigFile("testdata/http.conf.good.yml")
|
||||
if err != nil {
|
||||
t.Errorf("Error loading HTTP client config: %v", err)
|
||||
}
|
||||
err = cfg.Validate()
|
||||
if err != nil {
|
||||
t.Fatalf("Error validating %s: %s", "testdata/http.conf.good.yml", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidHTTPConfigs(t *testing.T) {
|
||||
for _, ee := range invalidHTTPClientConfigs {
|
||||
_, _, err := LoadHTTPConfigFile(ee.httpClientConfigFile)
|
||||
if err == nil {
|
||||
t.Error("Expected error with config but got none")
|
||||
continue
|
||||
}
|
||||
if !strings.Contains(err.Error(), ee.errMsg) {
|
||||
t.Errorf("Expected error for invalid HTTP client configuration to contain %q but got: %s", ee.errMsg, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// LoadHTTPConfig parses the YAML input s into a HTTPClientConfig.
|
||||
func LoadHTTPConfig(s string) (*HTTPClientConfig, error) {
|
||||
cfg := &HTTPClientConfig{}
|
||||
err := yaml.UnmarshalStrict([]byte(s), cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
// LoadHTTPConfigFile parses the given YAML file into a HTTPClientConfig.
|
||||
func LoadHTTPConfigFile(filename string) (*HTTPClientConfig, []byte, error) {
|
||||
content, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
cfg, err := LoadHTTPConfig(string(content))
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return cfg, content, nil
|
||||
}
|
||||
|
||||
type roundTrip struct {
|
||||
theResponse *http.Response
|
||||
theError error
|
||||
}
|
||||
|
||||
func (rt *roundTrip) RoundTrip(r *http.Request) (*http.Response, error) {
|
||||
return rt.theResponse, rt.theError
|
||||
}
|
||||
|
||||
type roundTripCheckRequest struct {
|
||||
checkRequest func(*http.Request)
|
||||
roundTrip
|
||||
}
|
||||
|
||||
func (rt *roundTripCheckRequest) RoundTrip(r *http.Request) (*http.Response, error) {
|
||||
rt.checkRequest(r)
|
||||
return rt.theResponse, rt.theError
|
||||
}
|
||||
|
||||
// NewRoundTripCheckRequest creates a new instance of a type that implements http.RoundTripper,
|
||||
// which before returning theResponse and theError, executes checkRequest against a http.Request.
|
||||
func NewRoundTripCheckRequest(checkRequest func(*http.Request), theResponse *http.Response, theError error) http.RoundTripper {
|
||||
return &roundTripCheckRequest{
|
||||
checkRequest: checkRequest,
|
||||
roundTrip: roundTrip{
|
||||
theResponse: theResponse,
|
||||
theError: theError}}
|
||||
}
|
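The helpers above make it cheap to assert request mutation without a network hop. As a point of comparison, here is a hypothetical companion test, not part of the vendored file, that reuses NewRoundTripCheckRequest to check the basic-auth round tripper the same way the bearer-token ones are checked; it assumes only identifiers already defined in this package (Secret is string-based, as its use above implies).

func TestBasicAuthRoundTripperSketch(t *testing.T) {
    fakeRoundTripper := NewRoundTripCheckRequest(func(req *http.Request) {
        username, password, ok := req.BasicAuth()
        if !ok {
            t.Error("The Authorization header wasn't set")
        } else if username != ExpectedUsername || password != ExpectedPassword {
            t.Errorf("Unexpected credentials: got %q/%q, want %q/%q",
                username, password, ExpectedUsername, ExpectedPassword)
        }
    }, nil, nil)

    // No Authorization header is present, so the round tripper must add one
    // from the configured username and password before delegating.
    rt := NewBasicAuthRoundTripper(ExpectedUsername, Secret(ExpectedPassword), "", fakeRoundTripper)
    request, _ := http.NewRequest("GET", "/hitchhiker", nil)
    rt.RoundTrip(request)
}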
27
vendor/github.com/prometheus/common/config/testdata/barney-no-pass.key
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIEpQIBAAKCAQEAxmYjfBZhZbAup9uSULehoqPCv/U+77ETxUNyS2nviWEHDAb/
|
||||
pFS8Btx4oCQ1ECVSyxcUmXSlrvDjMY4sisOHvndNRlGi274M5a8Q5yD1BUqvxq3u
|
||||
XB/+SYNVShBzaswrSjpzMe89AlOPxPjnE14OXh00j2hHunOG4jhlWgJnY0YyvUQQ
|
||||
YWO6KrmKMiZ4MgmY0SWh/ZhlkDJPtkp3aUVM2sheCru/70E9viLGfdlhc2pIMshy
|
||||
wNp4/5IkHBZwbqXFFGX4sRtSXI/auZNvcHOBse+3e3BonWvBWS2lIYbzpX3vLB7B
|
||||
E9BGIxWn1fgNQr14yFPaccSszBvgtmEUONolnwIDAQABAoIBAQC7nBhQHgXKGBl2
|
||||
Z97rb0pstrjRtsLl/Cg68LWi9LEr0tHMIM4bgnkvb8qtfK+k7fZl0BSNrE2EqYvd
|
||||
75jVO2MgzEYJieccLpKZm7u7JGIut9qSYSU2fpaCw6uiVv4dbqY9EhqejKG/km8w
|
||||
j0JMATRK8Qkj1zOE7/wL7dKBlCZaK3u+OT17spuA/21PG/cLiPaSGSA3CU/eqbkU
|
||||
BD6JeBxp33XNTytwWoOvarsigpL0dGqQ7+qhGq6t69qFfWoe9rimV7Ya+tB9zF/U
|
||||
HzOIEspOYvzxe+C7VJjlVFr4haMYmsrO9qRUJ2ofp49OLVdfEANsdVISSvS63BEp
|
||||
gBZN8Ko5AoGBAO1z8y8YCsI+2vBG6nxZ1eMba0KHi3bS8db1TaenJBV22w6WQATh
|
||||
hEaU6VLMFcMvrOUjXN/7HJfnEMyvFT6gb9obPDVEMZw88s9lVN6njgGLZR/jodyN
|
||||
7N7utLopN043Ra0WfEILAXPSz8esT1yn05OZV6AFHxJEWMrX3/4+spCLAoGBANXl
|
||||
RomieVY4u3FF/uzhbzKNNb9ETxrQuexfbangKp5eLniwnr2SQWIbyPzeurwp15J8
|
||||
HvxB2vpNvs1khSwNx9dQfMdiUVPGLWj7MimAHTHsnQ9LVV9W28ghuSWbjQDGTUt1
|
||||
WCCu1MkKIOzupbi+zgsNlI33yilRQKAb9SRxdy29AoGBAOKpvyZiPcrkMxwPpb/k
|
||||
BU7QGpgcSR25CQ+Xg3QZEVHH7h1DgYLnPtwdQ4g8tj1mohTsp7hKvSWndRrdulrY
|
||||
zUyWmOeD3BN2/pTI9rW/nceNp49EPHsLo2O+2xelRlzMWB98ikqEtPM59gt1SSB6
|
||||
N3X6d3GR0fIe+d9PKEtK0Cs3AoGAZ9r8ReXSvm+ra5ON9Nx8znHMEAON2TpRnBi1
|
||||
uY7zgpO+QrGXUfqKrqVJEKbgym4SkribnuYm+fP32eid1McYKk6VV4ZAcMm/0MJv
|
||||
F8Fx64S0ufFdEX6uFl1xdXYyn5apfyMJ2EyrWrYFSKWTZ8GVb753S/tteGRQWa1Z
|
||||
eQly0Y0CgYEAnI6G9KFvXI+MLu5y2LPYAwsesDFzaWwyDl96ioQTA9hNSrjR33Vw
|
||||
xwpiEe0T/WKF8NQ0QWnrQDbTvuCvZUK37TVxscYWuItL6vnBrYqr4Ck0j1BcGwV5
|
||||
jT581A/Vw8JJiR/vfcxgmrFYqoUmkMKDmCN1oImfz09GtQ4jQ1rlxz8=
|
||||
-----END RSA PRIVATE KEY-----
|
96
vendor/github.com/prometheus/common/config/testdata/barney.crt
generated
vendored
Normal file
|
@ -0,0 +1,96 @@
|
|||
Certificate:
|
||||
Data:
|
||||
Version: 3 (0x2)
|
||||
Serial Number: 2 (0x2)
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA
|
||||
Validity
|
||||
Not Before: Jul 13 04:02:47 2017 GMT
|
||||
Not After : Jul 13 04:02:47 2019 GMT
|
||||
Subject: C=NO, O=Telenor AS, OU=Support, CN=Barney Rubble
|
||||
Subject Public Key Info:
|
||||
Public Key Algorithm: rsaEncryption
|
||||
Public-Key: (2048 bit)
|
||||
Modulus:
|
||||
00:c6:66:23:7c:16:61:65:b0:2e:a7:db:92:50:b7:
|
||||
a1:a2:a3:c2:bf:f5:3e:ef:b1:13:c5:43:72:4b:69:
|
||||
ef:89:61:07:0c:06:ff:a4:54:bc:06:dc:78:a0:24:
|
||||
35:10:25:52:cb:17:14:99:74:a5:ae:f0:e3:31:8e:
|
||||
2c:8a:c3:87:be:77:4d:46:51:a2:db:be:0c:e5:af:
|
||||
10:e7:20:f5:05:4a:af:c6:ad:ee:5c:1f:fe:49:83:
|
||||
55:4a:10:73:6a:cc:2b:4a:3a:73:31:ef:3d:02:53:
|
||||
8f:c4:f8:e7:13:5e:0e:5e:1d:34:8f:68:47:ba:73:
|
||||
86:e2:38:65:5a:02:67:63:46:32:bd:44:10:61:63:
|
||||
ba:2a:b9:8a:32:26:78:32:09:98:d1:25:a1:fd:98:
|
||||
65:90:32:4f:b6:4a:77:69:45:4c:da:c8:5e:0a:bb:
|
||||
bf:ef:41:3d:be:22:c6:7d:d9:61:73:6a:48:32:c8:
|
||||
72:c0:da:78:ff:92:24:1c:16:70:6e:a5:c5:14:65:
|
||||
f8:b1:1b:52:5c:8f:da:b9:93:6f:70:73:81:b1:ef:
|
||||
b7:7b:70:68:9d:6b:c1:59:2d:a5:21:86:f3:a5:7d:
|
||||
ef:2c:1e:c1:13:d0:46:23:15:a7:d5:f8:0d:42:bd:
|
||||
78:c8:53:da:71:c4:ac:cc:1b:e0:b6:61:14:38:da:
|
||||
25:9f
|
||||
Exponent: 65537 (0x10001)
|
||||
X509v3 extensions:
|
||||
X509v3 Key Usage: critical
|
||||
Digital Signature
|
||||
X509v3 Basic Constraints:
|
||||
CA:FALSE
|
||||
X509v3 Extended Key Usage:
|
||||
TLS Web Client Authentication
|
||||
X509v3 Subject Key Identifier:
|
||||
F4:17:02:DD:1B:01:AB:C5:BC:17:A4:5C:4B:75:8E:EC:B1:E0:C8:F1
|
||||
X509v3 Authority Key Identifier:
|
||||
keyid:AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71
|
||||
|
||||
Authority Information Access:
|
||||
CA Issuers - URI:http://green.no/ca/tls-ca.cer
|
||||
|
||||
X509v3 CRL Distribution Points:
|
||||
|
||||
Full Name:
|
||||
URI:http://green.no/ca/tls-ca.crl
|
||||
|
||||
X509v3 Subject Alternative Name:
|
||||
email:barney@telenor.no
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
96:9a:c5:41:8a:2f:4a:c4:80:d9:2b:1a:cf:07:85:e9:b6:18:
|
||||
01:20:41:b9:c3:d4:ca:d3:2d:66:c3:1d:52:7f:25:d7:92:0c:
|
||||
e9:a9:ae:e6:2e:fa:9d:0a:cf:84:b9:03:f2:63:e3:d3:c9:70:
|
||||
6a:ac:04:5e:a9:2d:a2:43:7a:34:60:f7:a9:32:e1:48:ec:c6:
|
||||
03:ac:b3:06:2e:48:6e:d0:35:11:31:3d:0c:04:66:41:e6:b2:
|
||||
ec:8c:68:f8:e4:bc:47:85:39:60:69:a9:8a:ee:2f:56:88:8a:
|
||||
19:45:d0:84:8e:c2:27:2c:82:9c:07:6c:34:ae:41:61:63:f9:
|
||||
32:cb:8b:33:ea:2c:15:5f:f9:35:b0:3c:51:4d:5f:30:de:0b:
|
||||
88:28:94:79:f3:bd:69:37:ad:12:20:e1:6b:1d:b6:77:d9:83:
|
||||
db:81:a4:53:6c:0f:6a:17:5e:2b:c1:94:c6:42:e3:73:cd:9e:
|
||||
79:1b:8c:89:cd:da:ce:b0:f4:21:c5:32:25:04:6e:68:9f:a7:
|
||||
ca:f4:c5:86:e5:4e:d9:fd:69:73:e6:15:50:6e:76:0f:73:5e:
|
||||
7a:a3:f4:dc:15:4a:ab:bb:3c:9a:fa:9f:01:7a:5c:47:a9:a3:
|
||||
68:1c:49:e0:37:37:77:af:87:07:16:e4:e1:d7:98:39:15:a6:
|
||||
51:5d:4c:db
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEITCCAwmgAwIBAgIBAjANBgkqhkiG9w0BAQUFADBdMQswCQYDVQQGEwJOTzER
|
||||
MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1
|
||||
dGhvcml0eTEVMBMGA1UEAwwMR3JlZW4gVExTIENBMB4XDTE3MDcxMzA0MDI0N1oX
|
||||
DTE5MDcxMzA0MDI0N1owTDELMAkGA1UEBhMCTk8xEzARBgNVBAoMClRlbGVub3Ig
|
||||
QVMxEDAOBgNVBAsMB1N1cHBvcnQxFjAUBgNVBAMMDUJhcm5leSBSdWJibGUwggEi
|
||||
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGZiN8FmFlsC6n25JQt6Gio8K/
|
||||
9T7vsRPFQ3JLae+JYQcMBv+kVLwG3HigJDUQJVLLFxSZdKWu8OMxjiyKw4e+d01G
|
||||
UaLbvgzlrxDnIPUFSq/Gre5cH/5Jg1VKEHNqzCtKOnMx7z0CU4/E+OcTXg5eHTSP
|
||||
aEe6c4biOGVaAmdjRjK9RBBhY7oquYoyJngyCZjRJaH9mGWQMk+2SndpRUzayF4K
|
||||
u7/vQT2+IsZ92WFzakgyyHLA2nj/kiQcFnBupcUUZfixG1Jcj9q5k29wc4Gx77d7
|
||||
cGida8FZLaUhhvOlfe8sHsET0EYjFafV+A1CvXjIU9pxxKzMG+C2YRQ42iWfAgMB
|
||||
AAGjgfwwgfkwDgYDVR0PAQH/BAQDAgeAMAkGA1UdEwQCMAAwEwYDVR0lBAwwCgYI
|
||||
KwYBBQUHAwIwHQYDVR0OBBYEFPQXAt0bAavFvBekXEt1juyx4MjxMB8GA1UdIwQY
|
||||
MBaAFK5CiHXdBaaOSH9Qafm3NCNJuLRxMDkGCCsGAQUFBwEBBC0wKzApBggrBgEF
|
||||
BQcwAoYdaHR0cDovL2dyZWVuLm5vL2NhL3Rscy1jYS5jZXIwLgYDVR0fBCcwJTAj
|
||||
oCGgH4YdaHR0cDovL2dyZWVuLm5vL2NhL3Rscy1jYS5jcmwwHAYDVR0RBBUwE4ER
|
||||
YmFybmV5QHRlbGVub3Iubm8wDQYJKoZIhvcNAQEFBQADggEBAJaaxUGKL0rEgNkr
|
||||
Gs8Hhem2GAEgQbnD1MrTLWbDHVJ/JdeSDOmpruYu+p0Kz4S5A/Jj49PJcGqsBF6p
|
||||
LaJDejRg96ky4UjsxgOsswYuSG7QNRExPQwEZkHmsuyMaPjkvEeFOWBpqYruL1aI
|
||||
ihlF0ISOwicsgpwHbDSuQWFj+TLLizPqLBVf+TWwPFFNXzDeC4golHnzvWk3rRIg
|
||||
4WsdtnfZg9uBpFNsD2oXXivBlMZC43PNnnkbjInN2s6w9CHFMiUEbmifp8r0xYbl
|
||||
Ttn9aXPmFVBudg9zXnqj9NwVSqu7PJr6nwF6XEepo2gcSeA3N3evhwcW5OHXmDkV
|
||||
plFdTNs=
|
||||
-----END CERTIFICATE-----
|
1
vendor/github.com/prometheus/common/config/testdata/basic-auth-password
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
foobar
|
1
vendor/github.com/prometheus/common/config/testdata/bearer.token
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
theanswertothegreatquestionoflifetheuniverseandeverythingisfortytwo
|
3
vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.good.yaml
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
basic_auth:
|
||||
username: user
|
||||
password_file: testdata/basic-auth-password
|
2
vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.no-password.yaml
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
basic_auth:
|
||||
username: user
|
4
vendor/github.com/prometheus/common/config/testdata/http.conf.basic-auth.too-much.bad.yaml
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
basic_auth:
|
||||
username: user
|
||||
password: foo
|
||||
password_file: testdata/basic-auth-password
|
5
vendor/github.com/prometheus/common/config/testdata/http.conf.bearer-token-and-file-set.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
basic_auth:
|
||||
username: username
|
||||
password: "mysecret"
|
||||
bearer_token: mysecret
|
||||
bearer_token_file: file
|
4
vendor/github.com/prometheus/common/config/testdata/http.conf.empty.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
basic_auth:
|
||||
username: username
|
||||
password: mysecret
|
||||
bearer_token_file: file
|
4
vendor/github.com/prometheus/common/config/testdata/http.conf.good.yml
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
basic_auth:
|
||||
username: username
|
||||
password: "mysecret"
|
||||
proxy_url: "http://remote.host"
|
1
vendor/github.com/prometheus/common/config/testdata/http.conf.invalid-bearer-token-file.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
bearer_token_file: file
|
96
vendor/github.com/prometheus/common/config/testdata/server.crt
generated
vendored
Normal file
|
@ -0,0 +1,96 @@
|
|||
Certificate:
|
||||
Data:
|
||||
Version: 3 (0x2)
|
||||
Serial Number: 4 (0x4)
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA
|
||||
Validity
|
||||
Not Before: Jul 26 12:47:08 2017 GMT
|
||||
Not After : Jul 26 12:47:08 2019 GMT
|
||||
Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA
|
||||
Subject Public Key Info:
|
||||
Public Key Algorithm: rsaEncryption
|
||||
Public-Key: (2048 bit)
|
||||
Modulus:
|
||||
00:97:43:c5:f6:24:b8:ce:30:12:70:ea:17:9c:c0:
|
||||
ce:f2:ef:58:8b:12:7d:46:5e:01:f1:1a:93:b2:3e:
|
||||
d8:cf:99:bc:10:32:f1:12:b0:ef:00:6c:d6:c4:45:
|
||||
85:a8:33:7b:cd:ec:8f:4a:92:d0:5a:4a:41:69:7f:
|
||||
e3:dd:7e:71:d2:21:9c:df:43:b5:6c:60:bb:2a:12:
|
||||
a8:08:cf:c5:ee:08:7d:48:ea:4b:54:e4:82:d9:88:
|
||||
b0:b8:5e:02:12:cb:0e:09:99:b7:5f:42:b6:d7:26:
|
||||
34:0f:4a:e7:fc:ac:9c:59:cd:a1:50:4c:88:5f:f1:
|
||||
d2:7e:5b:21:41:f0:37:50:80:48:71:50:26:61:26:
|
||||
79:64:4b:7e:91:8d:0e:f4:27:fe:19:80:bf:39:55:
|
||||
b7:f3:d0:cd:61:6c:d8:c1:c7:d3:26:77:92:1a:14:
|
||||
42:56:cb:bc:fd:1a:4a:eb:17:d8:8d:af:d1:c0:46:
|
||||
9f:f0:40:5e:0e:34:2f:e7:db:be:66:fd:89:0b:6b:
|
||||
8c:71:c1:0b:0a:c5:c4:c4:eb:7f:44:c1:75:36:23:
|
||||
fd:ed:b6:ee:87:d9:88:47:e1:4b:7c:60:53:e7:85:
|
||||
1c:2f:82:4b:2b:5e:63:1a:49:17:36:2c:fc:39:23:
|
||||
49:22:4d:43:b5:51:22:12:24:9e:31:44:d8:16:4e:
|
||||
a8:eb
|
||||
Exponent: 65537 (0x10001)
|
||||
X509v3 extensions:
|
||||
X509v3 Key Usage: critical
|
||||
Digital Signature, Key Encipherment
|
||||
X509v3 Basic Constraints:
|
||||
CA:FALSE
|
||||
X509v3 Extended Key Usage:
|
||||
TLS Web Server Authentication, TLS Web Client Authentication
|
||||
X509v3 Subject Key Identifier:
|
||||
70:A9:FB:44:66:3C:63:96:E6:05:B2:74:47:C8:18:7E:43:6D:EE:8B
|
||||
X509v3 Authority Key Identifier:
|
||||
keyid:AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71
|
||||
|
||||
Authority Information Access:
|
||||
CA Issuers - URI:http://green.no/ca/tls-ca.cer
|
||||
|
||||
X509v3 CRL Distribution Points:
|
||||
|
||||
Full Name:
|
||||
URI:http://green.no/ca/tls-ca.crl
|
||||
|
||||
X509v3 Subject Alternative Name:
|
||||
IP Address:127.0.0.1, IP Address:127.0.0.0, DNS:localhost
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
56:1e:b8:52:ba:f5:72:42:ad:15:71:c1:5e:00:63:c9:4d:56:
|
||||
f2:8d:a3:a9:91:db:d0:b5:1b:88:80:93:80:28:48:b2:d0:a9:
|
||||
d0:ea:de:40:78:cc:57:8c:00:b8:65:99:68:95:98:9b:fb:a2:
|
||||
43:21:ea:00:37:01:77:c7:3b:1a:ec:58:2d:25:9c:ad:23:41:
|
||||
5e:ae:fd:ac:2f:26:81:b8:a7:49:9b:5a:10:fe:ad:c3:86:ab:
|
||||
59:67:b0:c7:81:72:95:60:b5:cb:fc:9f:ad:27:16:50:85:76:
|
||||
33:16:20:2c:1f:c6:14:09:0c:48:9f:c0:19:16:c9:fa:b0:d8:
|
||||
bf:b7:8d:a7:aa:eb:fe:f8:6f:dd:2b:83:ee:c7:8a:df:c8:59:
|
||||
e6:2e:13:1f:57:cc:6f:31:db:f7:b7:5c:3f:78:ad:22:2c:48:
|
||||
bb:6d:c4:ab:dc:c1:76:34:29:d9:1e:67:e0:ac:37:2b:90:f9:
|
||||
71:bd:cf:a1:01:b9:eb:0b:0b:79:2e:8b:52:3d:8e:13:97:c8:
|
||||
05:a3:ef:68:82:49:12:2a:25:1a:48:49:b8:7c:3c:66:0d:74:
|
||||
f9:00:8c:5b:57:d7:76:b1:26:95:86:b2:2e:a3:b2:9c:e0:eb:
|
||||
2d:fc:77:03:8f:cd:56:46:3a:c9:6a:fa:72:e3:19:d8:ef:de:
|
||||
4b:36:95:79
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEQjCCAyqgAwIBAgIBBDANBgkqhkiG9w0BAQUFADBdMQswCQYDVQQGEwJOTzER
|
||||
MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1
|
||||
dGhvcml0eTEVMBMGA1UEAwwMR3JlZW4gVExTIENBMB4XDTE3MDcyNjEyNDcwOFoX
|
||||
DTE5MDcyNjEyNDcwOFowXTELMAkGA1UEBhMCTk8xETAPBgNVBAoMCEdyZWVuIEFT
|
||||
MSQwIgYDVQQLDBtHcmVlbiBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFTATBgNVBAMM
|
||||
DEdyZWVuIFRMUyBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJdD
|
||||
xfYkuM4wEnDqF5zAzvLvWIsSfUZeAfEak7I+2M+ZvBAy8RKw7wBs1sRFhagze83s
|
||||
j0qS0FpKQWl/491+cdIhnN9DtWxguyoSqAjPxe4IfUjqS1TkgtmIsLheAhLLDgmZ
|
||||
t19CttcmNA9K5/ysnFnNoVBMiF/x0n5bIUHwN1CASHFQJmEmeWRLfpGNDvQn/hmA
|
||||
vzlVt/PQzWFs2MHH0yZ3khoUQlbLvP0aSusX2I2v0cBGn/BAXg40L+fbvmb9iQtr
|
||||
jHHBCwrFxMTrf0TBdTYj/e227ofZiEfhS3xgU+eFHC+CSyteYxpJFzYs/DkjSSJN
|
||||
Q7VRIhIknjFE2BZOqOsCAwEAAaOCAQswggEHMA4GA1UdDwEB/wQEAwIFoDAJBgNV
|
||||
HRMEAjAAMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQU
|
||||
cKn7RGY8Y5bmBbJ0R8gYfkNt7oswHwYDVR0jBBgwFoAUrkKIdd0Fpo5If1Bp+bc0
|
||||
I0m4tHEwOQYIKwYBBQUHAQEELTArMCkGCCsGAQUFBzAChh1odHRwOi8vZ3JlZW4u
|
||||
bm8vY2EvdGxzLWNhLmNlcjAuBgNVHR8EJzAlMCOgIaAfhh1odHRwOi8vZ3JlZW4u
|
||||
bm8vY2EvdGxzLWNhLmNybDAgBgNVHREEGTAXhwR/AAABhwR/AAAAgglsb2NhbGhv
|
||||
c3QwDQYJKoZIhvcNAQEFBQADggEBAFYeuFK69XJCrRVxwV4AY8lNVvKNo6mR29C1
|
||||
G4iAk4AoSLLQqdDq3kB4zFeMALhlmWiVmJv7okMh6gA3AXfHOxrsWC0lnK0jQV6u
|
||||
/awvJoG4p0mbWhD+rcOGq1lnsMeBcpVgtcv8n60nFlCFdjMWICwfxhQJDEifwBkW
|
||||
yfqw2L+3jaeq6/74b90rg+7Hit/IWeYuEx9XzG8x2/e3XD94rSIsSLttxKvcwXY0
|
||||
KdkeZ+CsNyuQ+XG9z6EBuesLC3kui1I9jhOXyAWj72iCSRIqJRpISbh8PGYNdPkA
|
||||
jFtX13axJpWGsi6jspzg6y38dwOPzVZGOslq+nLjGdjv3ks2lXk=
|
||||
-----END CERTIFICATE-----
|
28
vendor/github.com/prometheus/common/config/testdata/server.key
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCXQ8X2JLjOMBJw
|
||||
6hecwM7y71iLEn1GXgHxGpOyPtjPmbwQMvESsO8AbNbERYWoM3vN7I9KktBaSkFp
|
||||
f+PdfnHSIZzfQ7VsYLsqEqgIz8XuCH1I6ktU5ILZiLC4XgISyw4JmbdfQrbXJjQP
|
||||
Suf8rJxZzaFQTIhf8dJ+WyFB8DdQgEhxUCZhJnlkS36RjQ70J/4ZgL85Vbfz0M1h
|
||||
bNjBx9Mmd5IaFEJWy7z9GkrrF9iNr9HARp/wQF4ONC/n275m/YkLa4xxwQsKxcTE
|
||||
639EwXU2I/3ttu6H2YhH4Ut8YFPnhRwvgksrXmMaSRc2LPw5I0kiTUO1USISJJ4x
|
||||
RNgWTqjrAgMBAAECggEAVurwo4FyV7gzwIIi00XPJLT3ceJL7dUy1HHrEG8gchnq
|
||||
gHxlHdJhYyMnPVydcosyxp75r2YxJtCoSZDdRHbVvGLoGzpy0zW6FnDl8TpCh4aF
|
||||
RxKp+rvbnFf5A9ew5U+cX1PelHRnT7V6EJeAOiaNKOUJnnR7oHX59/UxZQw9HJnX
|
||||
3H4xUdRDmSS3BGKXEswbd7beQjqJtEIkbConfaw32yEod0w2MC0LI4miZ87/6Hsk
|
||||
pyvfpeYxXp4z3BTvFBbf/GEBFuozu63VWHayB9PDmEN/TlphoQpJQihdR2r1lz/H
|
||||
I5QwVlFTDvUSFitNLu+FoaHOfgLprQndbojBXb+tcQKBgQDHCPyM4V7k97RvJgmB
|
||||
ELgZiDYufDrjRLXvFzrrZ7ySU3N+nx3Gz/EhtgbHicDjnRVagHBIwi/QAfBJksCd
|
||||
xcioY5k2OW+8PSTsfFZTAA6XwJp/LGfJik/JjvAVv5CnxBu9lYG4WiSBJFp59ojC
|
||||
zTmfEuB4GPwrjQvzjlqaSpij9QKBgQDCjriwAB2UJIdlgK+DkryLqgim5I4cteB3
|
||||
+juVKz+S8ufFmVvmIXkyDcpyy/26VLC6esy8dV0JoWc4EeitoJvQD1JVZ5+CBTY+
|
||||
r9umx18oe2A/ZgcEf/A3Zd94jM1MwriF6YC+eIOhwhpi7T1xTLf3hc9B0OJ5B1mA
|
||||
vob9rGDtXwKBgD4rkW+UCictNIAvenKFPWxEPuBgT6ij0sx/DhlwCtgOFxprK0rp
|
||||
syFbkVyMq+KtM3lUez5O4c5wfJUOsPnXSOlISxhD8qHy23C/GdvNPcGrGNc2kKjE
|
||||
ek20R0wTzWSJ/jxG0gE6rwJjz5sfJfLrVd9ZbyI0c7hK03vdcHGXcXxtAoGAeGHl
|
||||
BwnbQ3niyTx53VijD2wTVGjhQgSLstEDowYSnTNtk8eTpG6b1gvQc32jLnMOsyQe
|
||||
oJGiEr5q5re2GBDjuDZyxGOMv9/Hs7wOlkCQsbS9Vh0kRHWBRlXjk2zT7yYhFMLp
|
||||
pXFeSW2X9BRFS2CkCCUkm93K9AZHLDE3x6ishNMCgYEAsDsUCzGhI49Aqe+CMP2l
|
||||
WPZl7SEMYS5AtdC5sLtbLYBl8+rMXVGL2opKXqVFYBYkqMJiHGdX3Ub6XSVKLYkN
|
||||
vm4PWmlQS24ZT+jlUl4jk6JU6SAlM/o6ixZl5KNR7yQm6zN2O/RHDeYm0urUQ9tF
|
||||
9dux7LbIFeOoJmoDTWG2+fI=
|
||||
-----END PRIVATE KEY-----
|
172
vendor/github.com/prometheus/common/config/testdata/tls-ca-chain.pem
generated
vendored
Normal file
|
@ -0,0 +1,172 @@
|
|||
Certificate:
|
||||
Data:
|
||||
Version: 3 (0x2)
|
||||
Serial Number: 2 (0x2)
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA
|
||||
Validity
|
||||
Not Before: Jul 13 03:47:20 2017 GMT
|
||||
Not After : Jul 13 03:47:20 2027 GMT
|
||||
Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green TLS CA
|
||||
Subject Public Key Info:
|
||||
Public Key Algorithm: rsaEncryption
|
||||
Public-Key: (2048 bit)
|
||||
Modulus:
|
||||
00:b5:5a:b3:7a:7f:6a:5b:e9:ee:62:ee:4f:61:42:
|
||||
79:93:06:bf:81:fc:9a:1f:b5:80:83:7c:b3:a6:94:
|
||||
54:58:8a:b1:74:cb:c3:b8:3c:23:a8:69:1f:ca:2b:
|
||||
af:be:97:ba:31:73:b5:b8:ce:d9:bf:bf:9a:7a:cf:
|
||||
3a:64:51:83:c9:36:d2:f7:3b:3a:0e:4c:c7:66:2e:
|
||||
bf:1a:df:ce:10:aa:3d:0f:19:74:03:7e:b5:10:bb:
|
||||
e8:37:bd:62:f0:42:2d:df:3d:ca:70:50:10:17:ce:
|
||||
a9:ec:55:8e:87:6f:ce:9a:04:36:14:96:cb:d1:a5:
|
||||
48:d5:d2:87:02:62:93:4e:21:4a:ff:be:44:f1:d2:
|
||||
7e:ed:74:da:c2:51:26:8e:03:a0:c2:bd:bd:5f:b0:
|
||||
50:11:78:fd:ab:1d:04:86:6c:c1:8d:20:bd:05:5f:
|
||||
51:67:c6:d3:07:95:92:2d:92:90:00:c6:9f:2d:dd:
|
||||
36:5c:dc:78:10:7c:f6:68:39:1d:2c:e0:e1:26:64:
|
||||
4f:36:34:66:a7:84:6a:90:15:3a:94:b7:79:b1:47:
|
||||
f5:d2:51:95:54:bf:92:76:9a:b9:88:ee:63:f9:6c:
|
||||
0d:38:c6:b6:1c:06:43:ed:24:1d:bb:6c:72:48:cc:
|
||||
8c:f4:35:bc:43:fe:a6:96:4c:31:5f:82:0d:0d:20:
|
||||
2a:3d
|
||||
Exponent: 65537 (0x10001)
|
||||
X509v3 extensions:
|
||||
X509v3 Key Usage: critical
|
||||
Certificate Sign, CRL Sign
|
||||
X509v3 Basic Constraints: critical
|
||||
CA:TRUE, pathlen:0
|
||||
X509v3 Subject Key Identifier:
|
||||
AE:42:88:75:DD:05:A6:8E:48:7F:50:69:F9:B7:34:23:49:B8:B4:71
|
||||
X509v3 Authority Key Identifier:
|
||||
keyid:60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5
|
||||
|
||||
Authority Information Access:
|
||||
CA Issuers - URI:http://green.no/ca/root-ca.cer
|
||||
|
||||
X509v3 CRL Distribution Points:
|
||||
|
||||
Full Name:
|
||||
URI:http://green.no/ca/root-ca.crl
|
||||
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
15:a7:ac:d7:25:9e:2a:d4:d1:14:b4:99:38:3d:2f:73:61:2a:
|
||||
d9:b6:8b:13:ea:fe:db:78:d9:0a:6c:df:26:6e:c1:d5:4a:97:
|
||||
42:19:dd:97:05:03:e4:2b:fc:1e:1f:38:3c:4e:b0:3b:8c:38:
|
||||
ad:2b:65:fa:35:2d:81:8e:e0:f6:0a:89:4c:38:97:01:4b:9c:
|
||||
ac:4e:e1:55:17:ef:0a:ad:a7:eb:1e:4b:86:23:12:f1:52:69:
|
||||
cb:a3:8a:ce:fb:14:8b:86:d7:bb:81:5e:bd:2a:c7:a7:79:58:
|
||||
00:10:c0:db:ff:d4:a5:b9:19:74:b3:23:19:4a:1f:78:4b:a8:
|
||||
b6:f6:20:26:c1:69:f9:89:7f:b8:1c:3b:a2:f9:37:31:80:2c:
|
||||
b0:b6:2b:d2:84:44:d7:42:e4:e6:44:51:04:35:d9:1c:a4:48:
|
||||
c6:b7:35:de:f2:ae:da:4b:ba:c8:09:42:8d:ed:7a:81:dc:ed:
|
||||
9d:f0:de:6e:21:b9:01:1c:ad:64:3d:25:4c:91:94:f1:13:18:
|
||||
bb:89:e9:48:ac:05:73:07:c8:db:bd:69:8e:6f:02:9d:b0:18:
|
||||
c0:b9:e1:a8:b1:17:50:3d:ac:05:6e:6f:63:4f:b1:73:33:60:
|
||||
9a:77:d2:81:8a:01:38:43:e9:4c:3c:90:63:a4:99:4b:d2:1b:
|
||||
f9:1b:ec:ee
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIECzCCAvOgAwIBAgIBAjANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJOTzER
|
||||
MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1
|
||||
dGhvcml0eTEWMBQGA1UEAwwNR3JlZW4gUm9vdCBDQTAeFw0xNzA3MTMwMzQ3MjBa
|
||||
Fw0yNzA3MTMwMzQ3MjBaMF0xCzAJBgNVBAYTAk5PMREwDwYDVQQKDAhHcmVlbiBB
|
||||
UzEkMCIGA1UECwwbR3JlZW4gQ2VydGlmaWNhdGUgQXV0aG9yaXR5MRUwEwYDVQQD
|
||||
DAxHcmVlbiBUTFMgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC1
|
||||
WrN6f2pb6e5i7k9hQnmTBr+B/JoftYCDfLOmlFRYirF0y8O4PCOoaR/KK6++l7ox
|
||||
c7W4ztm/v5p6zzpkUYPJNtL3OzoOTMdmLr8a384Qqj0PGXQDfrUQu+g3vWLwQi3f
|
||||
PcpwUBAXzqnsVY6Hb86aBDYUlsvRpUjV0ocCYpNOIUr/vkTx0n7tdNrCUSaOA6DC
|
||||
vb1fsFAReP2rHQSGbMGNIL0FX1FnxtMHlZItkpAAxp8t3TZc3HgQfPZoOR0s4OEm
|
||||
ZE82NGanhGqQFTqUt3mxR/XSUZVUv5J2mrmI7mP5bA04xrYcBkPtJB27bHJIzIz0
|
||||
NbxD/qaWTDFfgg0NICo9AgMBAAGjgdQwgdEwDgYDVR0PAQH/BAQDAgEGMBIGA1Ud
|
||||
EwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFK5CiHXdBaaOSH9Qafm3NCNJuLRxMB8G
|
||||
A1UdIwQYMBaAFGCTUy/HzyrX8wko9jyunFDsk2PlMDoGCCsGAQUFBwEBBC4wLDAq
|
||||
BggrBgEFBQcwAoYeaHR0cDovL2dyZWVuLm5vL2NhL3Jvb3QtY2EuY2VyMC8GA1Ud
|
||||
HwQoMCYwJKAioCCGHmh0dHA6Ly9ncmVlbi5uby9jYS9yb290LWNhLmNybDANBgkq
|
||||
hkiG9w0BAQUFAAOCAQEAFaes1yWeKtTRFLSZOD0vc2Eq2baLE+r+23jZCmzfJm7B
|
||||
1UqXQhndlwUD5Cv8Hh84PE6wO4w4rStl+jUtgY7g9gqJTDiXAUucrE7hVRfvCq2n
|
||||
6x5LhiMS8VJpy6OKzvsUi4bXu4FevSrHp3lYABDA2//UpbkZdLMjGUofeEuotvYg
|
||||
JsFp+Yl/uBw7ovk3MYAssLYr0oRE10Lk5kRRBDXZHKRIxrc13vKu2ku6yAlCje16
|
||||
gdztnfDebiG5ARytZD0lTJGU8RMYu4npSKwFcwfI271pjm8CnbAYwLnhqLEXUD2s
|
||||
BW5vY0+xczNgmnfSgYoBOEPpTDyQY6SZS9Ib+Rvs7g==
|
||||
-----END CERTIFICATE-----
|
||||
Certificate:
|
||||
Data:
|
||||
Version: 3 (0x2)
|
||||
Serial Number: 1 (0x1)
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
Issuer: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA
|
||||
Validity
|
||||
Not Before: Jul 13 03:44:39 2017 GMT
|
||||
Not After : Dec 31 23:59:59 2030 GMT
|
||||
Subject: C=NO, O=Green AS, OU=Green Certificate Authority, CN=Green Root CA
|
||||
Subject Public Key Info:
|
||||
Public Key Algorithm: rsaEncryption
|
||||
Public-Key: (2048 bit)
|
||||
Modulus:
|
||||
00:a7:e8:ed:de:d4:54:08:41:07:40:d5:c0:43:d6:
|
||||
ab:d3:9e:21:87:c6:13:bf:a7:cf:3d:08:4f:c1:fe:
|
||||
8f:e5:6c:c5:89:97:e5:27:75:26:c3:2a:73:2d:34:
|
||||
7c:6f:35:8d:40:66:61:05:c0:eb:e9:b3:38:47:f8:
|
||||
8b:26:35:2c:df:dc:24:31:fe:72:e3:87:10:d1:f7:
|
||||
a0:57:b7:f3:b1:1a:fe:c7:4b:f8:7b:14:6d:73:08:
|
||||
54:eb:63:3c:0c:ce:22:95:5f:3f:f2:6f:89:ae:63:
|
||||
da:80:74:36:21:13:e8:91:01:58:77:cc:c2:f2:42:
|
||||
bf:eb:b3:60:a7:21:ed:88:24:7f:eb:ff:07:41:9b:
|
||||
93:c8:5f:6a:8e:a6:1a:15:3c:bc:e7:0d:fd:05:fd:
|
||||
3c:c1:1c:1d:1f:57:2b:40:27:62:a1:7c:48:63:c1:
|
||||
45:e7:2f:20:ed:92:1c:42:94:e4:58:70:7a:b6:d2:
|
||||
85:c5:61:d8:cd:c6:37:6b:72:3b:7f:af:55:81:d6:
|
||||
9d:dc:10:c9:d8:0e:81:e4:5e:40:13:2f:20:e8:6b:
|
||||
46:81:ce:88:47:dd:38:71:3d:ef:21:cc:c0:67:cf:
|
||||
0a:f4:e9:3f:a8:9d:26:25:2e:23:1e:a3:11:18:cb:
|
||||
d1:70:1c:9e:7d:09:b1:a4:20:dc:95:15:1d:49:cf:
|
||||
1b:ad
|
||||
Exponent: 65537 (0x10001)
|
||||
X509v3 extensions:
|
||||
X509v3 Key Usage: critical
|
||||
Certificate Sign, CRL Sign
|
||||
X509v3 Basic Constraints: critical
|
||||
CA:TRUE
|
||||
X509v3 Subject Key Identifier:
|
||||
60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5
|
||||
X509v3 Authority Key Identifier:
|
||||
keyid:60:93:53:2F:C7:CF:2A:D7:F3:09:28:F6:3C:AE:9C:50:EC:93:63:E5
|
||||
|
||||
Signature Algorithm: sha1WithRSAEncryption
|
||||
a7:77:71:8b:1a:e5:5a:5b:87:54:08:bf:07:3e:cb:99:2f:dc:
|
||||
0e:8d:63:94:95:83:19:c9:92:82:d5:cb:5b:8f:1f:86:55:bc:
|
||||
70:01:1d:33:46:ec:99:de:6b:1f:c3:c2:7a:dd:ef:69:ab:96:
|
||||
58:ec:6c:6f:6c:70:82:71:8a:7f:f0:3b:80:90:d5:64:fa:80:
|
||||
27:b8:7b:50:69:98:4b:37:99:ad:bf:a2:5b:93:22:5e:96:44:
|
||||
3c:5a:cf:0c:f4:62:63:4a:6f:72:a7:f6:89:1d:09:26:3d:8f:
|
||||
a8:86:d4:b4:bc:dd:b3:38:ca:c0:59:16:8c:20:1f:89:35:12:
|
||||
b4:2d:c0:e9:de:93:e0:39:76:32:fc:80:db:da:44:26:fd:01:
|
||||
32:74:97:f8:44:ae:fe:05:b1:34:96:13:34:56:73:b4:93:a5:
|
||||
55:56:d1:01:51:9d:9c:55:e7:38:53:28:12:4e:38:72:0c:8f:
|
||||
bd:91:4c:45:48:3b:e1:0d:03:5f:58:40:c9:d3:a0:ac:b3:89:
|
||||
ce:af:27:8a:0f:ab:ec:72:4d:40:77:30:6b:36:fd:32:46:9f:
|
||||
ee:f9:c4:f5:17:06:0f:4b:d3:88:f5:a4:2f:3d:87:9e:f5:26:
|
||||
74:f0:c9:dc:cb:ad:d9:a7:8a:d3:71:15:00:d3:5d:9f:4c:59:
|
||||
3e:24:63:f5
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDnDCCAoSgAwIBAgIBATANBgkqhkiG9w0BAQUFADBeMQswCQYDVQQGEwJOTzER
|
||||
MA8GA1UECgwIR3JlZW4gQVMxJDAiBgNVBAsMG0dyZWVuIENlcnRpZmljYXRlIEF1
|
||||
dGhvcml0eTEWMBQGA1UEAwwNR3JlZW4gUm9vdCBDQTAgFw0xNzA3MTMwMzQ0Mzla
|
||||
GA8yMDMwMTIzMTIzNTk1OVowXjELMAkGA1UEBhMCTk8xETAPBgNVBAoMCEdyZWVu
|
||||
IEFTMSQwIgYDVQQLDBtHcmVlbiBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFjAUBgNV
|
||||
BAMMDUdyZWVuIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
AQCn6O3e1FQIQQdA1cBD1qvTniGHxhO/p889CE/B/o/lbMWJl+UndSbDKnMtNHxv
|
||||
NY1AZmEFwOvpszhH+IsmNSzf3CQx/nLjhxDR96BXt/OxGv7HS/h7FG1zCFTrYzwM
|
||||
ziKVXz/yb4muY9qAdDYhE+iRAVh3zMLyQr/rs2CnIe2IJH/r/wdBm5PIX2qOphoV
|
||||
PLznDf0F/TzBHB0fVytAJ2KhfEhjwUXnLyDtkhxClORYcHq20oXFYdjNxjdrcjt/
|
||||
r1WB1p3cEMnYDoHkXkATLyDoa0aBzohH3ThxPe8hzMBnzwr06T+onSYlLiMeoxEY
|
||||
y9FwHJ59CbGkINyVFR1JzxutAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNV
|
||||
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRgk1Mvx88q1/MJKPY8rpxQ7JNj5TAfBgNV
|
||||
HSMEGDAWgBRgk1Mvx88q1/MJKPY8rpxQ7JNj5TANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
p3dxixrlWluHVAi/Bz7LmS/cDo1jlJWDGcmSgtXLW48fhlW8cAEdM0bsmd5rH8PC
|
||||
et3vaauWWOxsb2xwgnGKf/A7gJDVZPqAJ7h7UGmYSzeZrb+iW5MiXpZEPFrPDPRi
|
||||
Y0pvcqf2iR0JJj2PqIbUtLzdszjKwFkWjCAfiTUStC3A6d6T4Dl2MvyA29pEJv0B
|
||||
MnSX+ESu/gWxNJYTNFZztJOlVVbRAVGdnFXnOFMoEk44cgyPvZFMRUg74Q0DX1hA
|
||||
ydOgrLOJzq8nig+r7HJNQHcwazb9Mkaf7vnE9RcGD0vTiPWkLz2HnvUmdPDJ3Mut
|
||||
2aeK03EVANNdn0xZPiRj9Q==
|
||||
-----END CERTIFICATE-----
|
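The CA chain above, together with server.crt/server.key and the barney client pair, gives the tests a complete client-certificate handshake. The *.bad.yml fixtures that follow exercise the pairing rule enforced by NewTLSConfig: a client cert without a key, or a key without a cert, is rejected. A minimal external sketch of that rule (the file name is a placeholder, as in the fixture):

package main

import (
    "fmt"

    "github.com/prometheus/common/config"
)

func main() {
    // cert_file without key_file must fail, mirroring tls_config.cert_no_key.bad.yml.
    _, err := config.NewTLSConfig(&config.TLSConfig{CertFile: "somefile"})
    fmt.Println(err) // client cert file "somefile" specified without client key file
}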
1
vendor/github.com/prometheus/common/config/testdata/tls_config.cert_no_key.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
cert_file: somefile
|
0
vendor/github.com/prometheus/common/config/testdata/tls_config.empty.good.yml
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/config/testdata/tls_config.insecure.good.yml
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
insecure_skip_verify: true
|
1
vendor/github.com/prometheus/common/config/testdata/tls_config.invalid_field.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
something_invalid: true
|
1
vendor/github.com/prometheus/common/config/testdata/tls_config.key_no_cert.bad.yml
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
key_file: somefile
|
62
vendor/github.com/prometheus/common/config/tls_config_test.go
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
// Copyright 2016 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package config
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"io/ioutil"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// LoadTLSConfig parses the given YAML file into a tls.Config.
|
||||
func LoadTLSConfig(filename string) (*tls.Config, error) {
|
||||
content, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cfg := TLSConfig{}
|
||||
if err = yaml.UnmarshalStrict(content, &cfg); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewTLSConfig(&cfg)
|
||||
}
|
||||
|
||||
var expectedTLSConfigs = []struct {
|
||||
filename string
|
||||
config *tls.Config
|
||||
}{
|
||||
{
|
||||
filename: "tls_config.empty.good.yml",
|
||||
config: &tls.Config{},
|
||||
}, {
|
||||
filename: "tls_config.insecure.good.yml",
|
||||
config: &tls.Config{InsecureSkipVerify: true},
|
||||
},
|
||||
}
|
||||
|
||||
func TestValidTLSConfig(t *testing.T) {
|
||||
for _, cfg := range expectedTLSConfigs {
|
||||
cfg.config.BuildNameToCertificate()
|
||||
got, err := LoadTLSConfig("testdata/" + cfg.filename)
|
||||
if err != nil {
|
||||
t.Errorf("Error parsing %s: %s", cfg.filename, err)
|
||||
}
|
||||
if !reflect.DeepEqual(*got, *cfg.config) {
|
||||
t.Fatalf("%v: unexpected config result: \n\n%v\n expected\n\n%v", cfg.filename, got, cfg.config)
|
||||
}
|
||||
}
|
||||
}
|
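LoadTLSConfig above is a test-only helper, but the same three steps work for any caller: read the YAML, unmarshal it strictly into TLSConfig, and hand the result to NewTLSConfig. A minimal sketch of doing that outside the package and plugging the result into an http.Transport (the file path and URL are placeholders):

package main

import (
    "io/ioutil"
    "log"
    "net/http"

    "github.com/prometheus/common/config"
    yaml "gopkg.in/yaml.v2"
)

func main() {
    content, err := ioutil.ReadFile("tls_config.yml") // placeholder path
    if err != nil {
        log.Fatal(err)
    }
    cfg := config.TLSConfig{}
    if err := yaml.UnmarshalStrict(content, &cfg); err != nil {
        log.Fatal(err)
    }
    tlsConfig, err := config.NewTLSConfig(&cfg)
    if err != nil {
        log.Fatal(err)
    }
    client := &http.Client{Transport: &http.Transport{TLSClientConfig: tlsConfig}}
    resp, err := client.Get("https://localhost:8443/metrics") // placeholder URL
    if err != nil {
        log.Fatal(err)
    }
    resp.Body.Close()
}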
2
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_0
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
|
||||
|
6
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_1
generated
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
|
||||
minimal_metric 1.234
|
||||
another_metric -3e3 103948
|
||||
# Even that:
|
||||
no_labels{} 3
|
||||
# HELP line for non-existing metric will be ignored.
|
12
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_2
generated
vendored
Normal file
|
@ -0,0 +1,12 @@
|
|||
|
||||
# A normal comment.
|
||||
#
|
||||
# TYPE name counter
|
||||
name{labelname="val1",basename="basevalue"} NaN
|
||||
name {labelname="val2",basename="base\"v\\al\nue"} 0.23 1234567890
|
||||
# HELP name two-line\n doc str\\ing
|
||||
|
||||
# HELP name2 doc str"ing 2
|
||||
# TYPE name2 gauge
|
||||
name2{labelname="val2" ,basename = "basevalue2" } +Inf 54321
|
||||
name2{ labelname = "val1" , }-Inf
|
22
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_3
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
|||
|
||||
# TYPE my_summary summary
|
||||
my_summary{n1="val1",quantile="0.5"} 110
|
||||
decoy -1 -2
|
||||
my_summary{n1="val1",quantile="0.9"} 140 1
|
||||
my_summary_count{n1="val1"} 42
|
||||
# Latest timestamp wins in case of a summary.
|
||||
my_summary_sum{n1="val1"} 4711 2
|
||||
fake_sum{n1="val1"} 2001
|
||||
# TYPE another_summary summary
|
||||
another_summary_count{n2="val2",n1="val1"} 20
|
||||
my_summary_count{n2="val2",n1="val1"} 5 5
|
||||
another_summary{n1="val1",n2="val2",quantile=".3"} -1.2
|
||||
my_summary_sum{n1="val2"} 08 15
|
||||
my_summary{n1="val3", quantile="0.2"} 4711
|
||||
my_summary{n1="val1",n2="val2",quantile="-12.34",} NaN
|
||||
# some
|
||||
# funny comments
|
||||
# HELP
|
||||
# HELP
|
||||
# HELP my_summary
|
||||
# HELP my_summary
|
10
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_4
generated
vendored
Normal file
10
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_4
generated
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
|
||||
# HELP request_duration_microseconds The response latency.
|
||||
# TYPE request_duration_microseconds histogram
|
||||
request_duration_microseconds_bucket{le="100"} 123
|
||||
request_duration_microseconds_bucket{le="120"} 412
|
||||
request_duration_microseconds_bucket{le="144"} 592
|
||||
request_duration_microseconds_bucket{le="172.8"} 1524
|
||||
request_duration_microseconds_bucket{le="+Inf"} 2693
|
||||
request_duration_microseconds_sum 1.7560473e+06
|
||||
request_duration_microseconds_count 2693
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_0
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_0
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
bla 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_1
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_1
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label="\t"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_10
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_10
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label="bla"} 3.14 2 3
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_11
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_11
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label="bla"} blubb
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_12
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_12
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
# HELP metric one
|
||||
# HELP metric two
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_13
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_13
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
# TYPE metric counter
|
||||
# TYPE metric untyped
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_14
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_14
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
metric 4.12
|
||||
# TYPE metric counter
|
2
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_15
generated
vendored
Normal file
2
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_15
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
|
||||
# TYPE metric bla
|
2
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_16
generated
vendored
Normal file
2
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_16
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
|
||||
# TYPE met-ric
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_17
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_17
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
@invalidmetric{label="bla"} 3.14 2
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_18
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_18
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{label="bla"} 3.14 2
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_19
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_19
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
# TYPE metric histogram
|
||||
metric_bucket{le="bla"} 3.14
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_2
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_2
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
metric{label="new
|
||||
line"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_3
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_3
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{@="bla"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_4
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_4
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{__name__="bla"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_5
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_5
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label+="bla"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_6
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_6
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label=bla} 3.14
|
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_7
generated
vendored
Normal file
3
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_7
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
|
||||
# TYPE metric summary
|
||||
metric{quantile="bla"} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_8
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_8
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label="bla"+} 3.14
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_9
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/from_test_parse_error_9
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
metric{label="bla"} 3.14 2.72
|
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/minimal
generated
vendored
Normal file
1
vendor/github.com/prometheus/common/expfmt/fuzz/corpus/minimal
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
m{} 0
|
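The corpus files above are seed inputs for fuzzing the text exposition format parser in this vendored package. As a rough illustration of what that parser does with such input (a sketch only; it assumes the expfmt.TextParser API of the vendored prometheus/common package, and the metric line is copied from the corpus), one could write:

package main

import (
    "fmt"
    "strings"

    "github.com/prometheus/common/expfmt"
)

func main() {
    // One of the corpus lines above: a minimal, valid metric sample.
    input := "minimal_metric 1.234\n"

    var parser expfmt.TextParser
    families, err := parser.TextToMetricFamilies(strings.NewReader(input))
    if err != nil {
        fmt.Println("parse error:", err)
        return
    }
    for name, mf := range families {
        fmt.Printf("%s: %d metric(s)\n", name, len(mf.Metric))
    }
}

The "from_test_parse_error_*" files exercise the opposite path: feeding them to the same parser is expected to return an error rather than a metric family map.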
46 vendor/github.com/prometheus/common/expfmt/testdata/json2 generated vendored Normal file
@@ -0,0 +1,46 @@
[
  {
    "baseLabels": {
      "__name__": "rpc_calls_total",
      "job": "batch_job"
    },
    "docstring": "RPC calls.",
    "metric": {
      "type": "counter",
      "value": [
        {
          "labels": {
            "service": "zed"
          },
          "value": 25
        },
        {
          "labels": {
            "service": "bar"
          },
          "value": 24
        }
      ]
    }
  },
  {
    "baseLabels": {
      "__name__": "rpc_latency_microseconds"
    },
    "docstring": "RPC latency.",
    "metric": {
      "type": "histogram",
      "value": [
        {
          "labels": {
            "service": "foo"
          },
          "value": {
            "0.010000": 15,
            "0.990000": 17
          }
        }
      ]
    }
  }
]
46 vendor/github.com/prometheus/common/expfmt/testdata/json2_bad generated vendored Normal file
@@ -0,0 +1,46 @@
[
  {
    "baseLabels": {
      "__name__": "rpc_calls_total",
      "job": "batch_job"
    },
    "docstring": "RPC calls.",
    "metric": {
      "type": "counter",
      "value": [
        {
          "labels": {
            "servic|e": "zed"
          },
          "value": 25
        },
        {
          "labels": {
            "service": "bar"
          },
          "value": 24
        }
      ]
    }
  },
  {
    "baseLabels": {
      "__name__": "rpc_latency_microseconds"
    },
    "docstring": "RPC latency.",
    "metric": {
      "type": "histogram",
      "value": [
        {
          "labels": {
            "service": "foo"
          },
          "value": {
            "0.010000": 15,
            "0.990000": 17
          }
        }
      ]
    }
  }
]
BIN vendor/github.com/prometheus/common/expfmt/testdata/protobuf generated vendored Normal file
Binary file not shown.
BIN vendor/github.com/prometheus/common/expfmt/testdata/protobuf.gz generated vendored Normal file
Binary file not shown.
322 vendor/github.com/prometheus/common/expfmt/testdata/text generated vendored Normal file
@@ -0,0 +1,322 @@
# HELP http_request_duration_microseconds The HTTP request latencies in microseconds.
|
||||
# TYPE http_request_duration_microseconds summary
|
||||
http_request_duration_microseconds{handler="/",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/"} 0
|
||||
http_request_duration_microseconds_count{handler="/"} 0
|
||||
http_request_duration_microseconds{handler="/alerts",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/alerts",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/alerts",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/alerts"} 0
|
||||
http_request_duration_microseconds_count{handler="/alerts"} 0
|
||||
http_request_duration_microseconds{handler="/api/metrics",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/api/metrics",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/api/metrics",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/api/metrics"} 0
|
||||
http_request_duration_microseconds_count{handler="/api/metrics"} 0
|
||||
http_request_duration_microseconds{handler="/api/query",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/api/query",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/api/query",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/api/query"} 0
|
||||
http_request_duration_microseconds_count{handler="/api/query"} 0
|
||||
http_request_duration_microseconds{handler="/api/query_range",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/api/query_range",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/api/query_range",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/api/query_range"} 0
|
||||
http_request_duration_microseconds_count{handler="/api/query_range"} 0
|
||||
http_request_duration_microseconds{handler="/api/targets",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/api/targets",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/api/targets",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/api/targets"} 0
|
||||
http_request_duration_microseconds_count{handler="/api/targets"} 0
|
||||
http_request_duration_microseconds{handler="/consoles/",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/consoles/",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/consoles/",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/consoles/"} 0
|
||||
http_request_duration_microseconds_count{handler="/consoles/"} 0
|
||||
http_request_duration_microseconds{handler="/graph",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/graph",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/graph",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/graph"} 0
|
||||
http_request_duration_microseconds_count{handler="/graph"} 0
|
||||
http_request_duration_microseconds{handler="/heap",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/heap",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/heap",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/heap"} 0
|
||||
http_request_duration_microseconds_count{handler="/heap"} 0
|
||||
http_request_duration_microseconds{handler="/static/",quantile="0.5"} 0
|
||||
http_request_duration_microseconds{handler="/static/",quantile="0.9"} 0
|
||||
http_request_duration_microseconds{handler="/static/",quantile="0.99"} 0
|
||||
http_request_duration_microseconds_sum{handler="/static/"} 0
|
||||
http_request_duration_microseconds_count{handler="/static/"} 0
|
||||
http_request_duration_microseconds{handler="prometheus",quantile="0.5"} 1307.275
|
||||
http_request_duration_microseconds{handler="prometheus",quantile="0.9"} 1858.632
|
||||
http_request_duration_microseconds{handler="prometheus",quantile="0.99"} 3087.384
|
||||
http_request_duration_microseconds_sum{handler="prometheus"} 179886.5000000001
|
||||
http_request_duration_microseconds_count{handler="prometheus"} 119
|
||||
# HELP http_request_size_bytes The HTTP request sizes in bytes.
|
||||
# TYPE http_request_size_bytes summary
|
||||
http_request_size_bytes{handler="/",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/"} 0
|
||||
http_request_size_bytes_count{handler="/"} 0
|
||||
http_request_size_bytes{handler="/alerts",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/alerts",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/alerts",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/alerts"} 0
|
||||
http_request_size_bytes_count{handler="/alerts"} 0
|
||||
http_request_size_bytes{handler="/api/metrics",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/api/metrics",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/api/metrics",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/api/metrics"} 0
|
||||
http_request_size_bytes_count{handler="/api/metrics"} 0
|
||||
http_request_size_bytes{handler="/api/query",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/api/query",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/api/query",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/api/query"} 0
|
||||
http_request_size_bytes_count{handler="/api/query"} 0
|
||||
http_request_size_bytes{handler="/api/query_range",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/api/query_range",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/api/query_range",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/api/query_range"} 0
|
||||
http_request_size_bytes_count{handler="/api/query_range"} 0
|
||||
http_request_size_bytes{handler="/api/targets",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/api/targets",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/api/targets",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/api/targets"} 0
|
||||
http_request_size_bytes_count{handler="/api/targets"} 0
|
||||
http_request_size_bytes{handler="/consoles/",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/consoles/",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/consoles/",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/consoles/"} 0
|
||||
http_request_size_bytes_count{handler="/consoles/"} 0
|
||||
http_request_size_bytes{handler="/graph",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/graph",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/graph",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/graph"} 0
|
||||
http_request_size_bytes_count{handler="/graph"} 0
|
||||
http_request_size_bytes{handler="/heap",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/heap",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/heap",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/heap"} 0
|
||||
http_request_size_bytes_count{handler="/heap"} 0
|
||||
http_request_size_bytes{handler="/static/",quantile="0.5"} 0
|
||||
http_request_size_bytes{handler="/static/",quantile="0.9"} 0
|
||||
http_request_size_bytes{handler="/static/",quantile="0.99"} 0
|
||||
http_request_size_bytes_sum{handler="/static/"} 0
|
||||
http_request_size_bytes_count{handler="/static/"} 0
|
||||
http_request_size_bytes{handler="prometheus",quantile="0.5"} 291
|
||||
http_request_size_bytes{handler="prometheus",quantile="0.9"} 291
|
||||
http_request_size_bytes{handler="prometheus",quantile="0.99"} 291
|
||||
http_request_size_bytes_sum{handler="prometheus"} 34488
|
||||
http_request_size_bytes_count{handler="prometheus"} 119
|
||||
# HELP http_requests_total Total number of HTTP requests made.
|
||||
# TYPE http_requests_total counter
|
||||
http_requests_total{code="200",handler="prometheus",method="get"} 119
|
||||
# HELP http_response_size_bytes The HTTP response sizes in bytes.
|
||||
# TYPE http_response_size_bytes summary
|
||||
http_response_size_bytes{handler="/",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/"} 0
|
||||
http_response_size_bytes_count{handler="/"} 0
|
||||
http_response_size_bytes{handler="/alerts",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/alerts",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/alerts",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/alerts"} 0
|
||||
http_response_size_bytes_count{handler="/alerts"} 0
|
||||
http_response_size_bytes{handler="/api/metrics",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/api/metrics",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/api/metrics",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/api/metrics"} 0
|
||||
http_response_size_bytes_count{handler="/api/metrics"} 0
|
||||
http_response_size_bytes{handler="/api/query",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/api/query",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/api/query",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/api/query"} 0
|
||||
http_response_size_bytes_count{handler="/api/query"} 0
|
||||
http_response_size_bytes{handler="/api/query_range",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/api/query_range",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/api/query_range",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/api/query_range"} 0
|
||||
http_response_size_bytes_count{handler="/api/query_range"} 0
|
||||
http_response_size_bytes{handler="/api/targets",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/api/targets",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/api/targets",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/api/targets"} 0
|
||||
http_response_size_bytes_count{handler="/api/targets"} 0
|
||||
http_response_size_bytes{handler="/consoles/",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/consoles/",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/consoles/",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/consoles/"} 0
|
||||
http_response_size_bytes_count{handler="/consoles/"} 0
|
||||
http_response_size_bytes{handler="/graph",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/graph",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/graph",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/graph"} 0
|
||||
http_response_size_bytes_count{handler="/graph"} 0
|
||||
http_response_size_bytes{handler="/heap",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/heap",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/heap",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/heap"} 0
|
||||
http_response_size_bytes_count{handler="/heap"} 0
|
||||
http_response_size_bytes{handler="/static/",quantile="0.5"} 0
|
||||
http_response_size_bytes{handler="/static/",quantile="0.9"} 0
|
||||
http_response_size_bytes{handler="/static/",quantile="0.99"} 0
|
||||
http_response_size_bytes_sum{handler="/static/"} 0
|
||||
http_response_size_bytes_count{handler="/static/"} 0
|
||||
http_response_size_bytes{handler="prometheus",quantile="0.5"} 2049
|
||||
http_response_size_bytes{handler="prometheus",quantile="0.9"} 2058
|
||||
http_response_size_bytes{handler="prometheus",quantile="0.99"} 2064
|
||||
http_response_size_bytes_sum{handler="prometheus"} 247001
|
||||
http_response_size_bytes_count{handler="prometheus"} 119
|
||||
# HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
|
||||
# TYPE process_cpu_seconds_total counter
|
||||
process_cpu_seconds_total 0.55
|
||||
# HELP go_goroutines Number of goroutines that currently exist.
|
||||
# TYPE go_goroutines gauge
|
||||
go_goroutines 70
|
||||
# HELP process_max_fds Maximum number of open file descriptors.
|
||||
# TYPE process_max_fds gauge
|
||||
process_max_fds 8192
|
||||
# HELP process_open_fds Number of open file descriptors.
|
||||
# TYPE process_open_fds gauge
|
||||
process_open_fds 29
|
||||
# HELP process_resident_memory_bytes Resident memory size in bytes.
|
||||
# TYPE process_resident_memory_bytes gauge
|
||||
process_resident_memory_bytes 5.3870592e+07
|
||||
# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.
|
||||
# TYPE process_start_time_seconds gauge
|
||||
process_start_time_seconds 1.42236894836e+09
|
||||
# HELP process_virtual_memory_bytes Virtual memory size in bytes.
|
||||
# TYPE process_virtual_memory_bytes gauge
|
||||
process_virtual_memory_bytes 5.41478912e+08
|
||||
# HELP prometheus_dns_sd_lookup_failures_total The number of DNS-SD lookup failures.
|
||||
# TYPE prometheus_dns_sd_lookup_failures_total counter
|
||||
prometheus_dns_sd_lookup_failures_total 0
|
||||
# HELP prometheus_dns_sd_lookups_total The number of DNS-SD lookups.
|
||||
# TYPE prometheus_dns_sd_lookups_total counter
|
||||
prometheus_dns_sd_lookups_total 7
|
||||
# HELP prometheus_evaluator_duration_milliseconds The duration for all evaluations to execute.
|
||||
# TYPE prometheus_evaluator_duration_milliseconds summary
|
||||
prometheus_evaluator_duration_milliseconds{quantile="0.01"} 0
|
||||
prometheus_evaluator_duration_milliseconds{quantile="0.05"} 0
|
||||
prometheus_evaluator_duration_milliseconds{quantile="0.5"} 0
|
||||
prometheus_evaluator_duration_milliseconds{quantile="0.9"} 1
|
||||
prometheus_evaluator_duration_milliseconds{quantile="0.99"} 1
|
||||
prometheus_evaluator_duration_milliseconds_sum 12
|
||||
prometheus_evaluator_duration_milliseconds_count 23
|
||||
# HELP prometheus_local_storage_checkpoint_duration_milliseconds The duration (in milliseconds) it took to checkpoint in-memory metrics and head chunks.
|
||||
# TYPE prometheus_local_storage_checkpoint_duration_milliseconds gauge
|
||||
prometheus_local_storage_checkpoint_duration_milliseconds 0
|
||||
# HELP prometheus_local_storage_chunk_ops_total The total number of chunk operations by their type.
|
||||
# TYPE prometheus_local_storage_chunk_ops_total counter
|
||||
prometheus_local_storage_chunk_ops_total{type="create"} 598
|
||||
prometheus_local_storage_chunk_ops_total{type="persist"} 174
|
||||
prometheus_local_storage_chunk_ops_total{type="pin"} 920
|
||||
prometheus_local_storage_chunk_ops_total{type="transcode"} 415
|
||||
prometheus_local_storage_chunk_ops_total{type="unpin"} 920
|
||||
# HELP prometheus_local_storage_indexing_batch_latency_milliseconds Quantiles for batch indexing latencies in milliseconds.
|
||||
# TYPE prometheus_local_storage_indexing_batch_latency_milliseconds summary
|
||||
prometheus_local_storage_indexing_batch_latency_milliseconds{quantile="0.5"} 0
|
||||
prometheus_local_storage_indexing_batch_latency_milliseconds{quantile="0.9"} 0
|
||||
prometheus_local_storage_indexing_batch_latency_milliseconds{quantile="0.99"} 0
|
||||
prometheus_local_storage_indexing_batch_latency_milliseconds_sum 0
|
||||
prometheus_local_storage_indexing_batch_latency_milliseconds_count 1
|
||||
# HELP prometheus_local_storage_indexing_batch_sizes Quantiles for indexing batch sizes (number of metrics per batch).
|
||||
# TYPE prometheus_local_storage_indexing_batch_sizes summary
|
||||
prometheus_local_storage_indexing_batch_sizes{quantile="0.5"} 2
|
||||
prometheus_local_storage_indexing_batch_sizes{quantile="0.9"} 2
|
||||
prometheus_local_storage_indexing_batch_sizes{quantile="0.99"} 2
|
||||
prometheus_local_storage_indexing_batch_sizes_sum 2
|
||||
prometheus_local_storage_indexing_batch_sizes_count 1
|
||||
# HELP prometheus_local_storage_indexing_queue_capacity The capacity of the indexing queue.
|
||||
# TYPE prometheus_local_storage_indexing_queue_capacity gauge
|
||||
prometheus_local_storage_indexing_queue_capacity 16384
|
||||
# HELP prometheus_local_storage_indexing_queue_length The number of metrics waiting to be indexed.
|
||||
# TYPE prometheus_local_storage_indexing_queue_length gauge
|
||||
prometheus_local_storage_indexing_queue_length 0
|
||||
# HELP prometheus_local_storage_ingested_samples_total The total number of samples ingested.
|
||||
# TYPE prometheus_local_storage_ingested_samples_total counter
|
||||
prometheus_local_storage_ingested_samples_total 30473
|
||||
# HELP prometheus_local_storage_invalid_preload_requests_total The total number of preload requests referring to a non-existent series. This is an indication of outdated label indexes.
|
||||
# TYPE prometheus_local_storage_invalid_preload_requests_total counter
|
||||
prometheus_local_storage_invalid_preload_requests_total 0
|
||||
# HELP prometheus_local_storage_memory_chunkdescs The current number of chunk descriptors in memory.
|
||||
# TYPE prometheus_local_storage_memory_chunkdescs gauge
|
||||
prometheus_local_storage_memory_chunkdescs 1059
|
||||
# HELP prometheus_local_storage_memory_chunks The current number of chunks in memory, excluding cloned chunks (i.e. chunks without a descriptor).
|
||||
# TYPE prometheus_local_storage_memory_chunks gauge
|
||||
prometheus_local_storage_memory_chunks 1020
|
||||
# HELP prometheus_local_storage_memory_series The current number of series in memory.
|
||||
# TYPE prometheus_local_storage_memory_series gauge
|
||||
prometheus_local_storage_memory_series 424
|
||||
# HELP prometheus_local_storage_persist_latency_microseconds A summary of latencies for persisting each chunk.
|
||||
# TYPE prometheus_local_storage_persist_latency_microseconds summary
|
||||
prometheus_local_storage_persist_latency_microseconds{quantile="0.5"} 30.377
|
||||
prometheus_local_storage_persist_latency_microseconds{quantile="0.9"} 203.539
|
||||
prometheus_local_storage_persist_latency_microseconds{quantile="0.99"} 2626.463
|
||||
prometheus_local_storage_persist_latency_microseconds_sum 20424.415
|
||||
prometheus_local_storage_persist_latency_microseconds_count 174
|
||||
# HELP prometheus_local_storage_persist_queue_capacity The total capacity of the persist queue.
|
||||
# TYPE prometheus_local_storage_persist_queue_capacity gauge
|
||||
prometheus_local_storage_persist_queue_capacity 1024
|
||||
# HELP prometheus_local_storage_persist_queue_length The current number of chunks waiting in the persist queue.
|
||||
# TYPE prometheus_local_storage_persist_queue_length gauge
|
||||
prometheus_local_storage_persist_queue_length 0
|
||||
# HELP prometheus_local_storage_series_ops_total The total number of series operations by their type.
|
||||
# TYPE prometheus_local_storage_series_ops_total counter
|
||||
prometheus_local_storage_series_ops_total{type="create"} 2
|
||||
prometheus_local_storage_series_ops_total{type="maintenance_in_memory"} 11
|
||||
# HELP prometheus_notifications_latency_milliseconds Latency quantiles for sending alert notifications (not including dropped notifications).
|
||||
# TYPE prometheus_notifications_latency_milliseconds summary
|
||||
prometheus_notifications_latency_milliseconds{quantile="0.5"} 0
|
||||
prometheus_notifications_latency_milliseconds{quantile="0.9"} 0
|
||||
prometheus_notifications_latency_milliseconds{quantile="0.99"} 0
|
||||
prometheus_notifications_latency_milliseconds_sum 0
|
||||
prometheus_notifications_latency_milliseconds_count 0
|
||||
# HELP prometheus_notifications_queue_capacity The capacity of the alert notifications queue.
|
||||
# TYPE prometheus_notifications_queue_capacity gauge
|
||||
prometheus_notifications_queue_capacity 100
|
||||
# HELP prometheus_notifications_queue_length The number of alert notifications in the queue.
|
||||
# TYPE prometheus_notifications_queue_length gauge
|
||||
prometheus_notifications_queue_length 0
|
||||
# HELP prometheus_rule_evaluation_duration_milliseconds The duration for a rule to execute.
|
||||
# TYPE prometheus_rule_evaluation_duration_milliseconds summary
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="alerting",quantile="0.5"} 0
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="alerting",quantile="0.9"} 0
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="alerting",quantile="0.99"} 2
|
||||
prometheus_rule_evaluation_duration_milliseconds_sum{rule_type="alerting"} 12
|
||||
prometheus_rule_evaluation_duration_milliseconds_count{rule_type="alerting"} 115
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="recording",quantile="0.5"} 0
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="recording",quantile="0.9"} 0
|
||||
prometheus_rule_evaluation_duration_milliseconds{rule_type="recording",quantile="0.99"} 3
|
||||
prometheus_rule_evaluation_duration_milliseconds_sum{rule_type="recording"} 15
|
||||
prometheus_rule_evaluation_duration_milliseconds_count{rule_type="recording"} 115
|
||||
# HELP prometheus_rule_evaluation_failures_total The total number of rule evaluation failures.
|
||||
# TYPE prometheus_rule_evaluation_failures_total counter
|
||||
prometheus_rule_evaluation_failures_total 0
|
||||
# HELP prometheus_samples_queue_capacity Capacity of the queue for unwritten samples.
|
||||
# TYPE prometheus_samples_queue_capacity gauge
|
||||
prometheus_samples_queue_capacity 4096
|
||||
# HELP prometheus_samples_queue_length Current number of items in the queue for unwritten samples. Each item comprises all samples exposed by one target as one metric family (i.e. metrics of the same name).
|
||||
# TYPE prometheus_samples_queue_length gauge
|
||||
prometheus_samples_queue_length 0
|
||||
# HELP prometheus_target_interval_length_seconds Actual intervals between scrapes.
|
||||
# TYPE prometheus_target_interval_length_seconds summary
|
||||
prometheus_target_interval_length_seconds{interval="15s",quantile="0.01"} 14
|
||||
prometheus_target_interval_length_seconds{interval="15s",quantile="0.05"} 14
|
||||
prometheus_target_interval_length_seconds{interval="15s",quantile="0.5"} 15
|
||||
prometheus_target_interval_length_seconds{interval="15s",quantile="0.9"} 15
|
||||
prometheus_target_interval_length_seconds{interval="15s",quantile="0.99"} 15
|
||||
prometheus_target_interval_length_seconds_sum{interval="15s"} 175
|
||||
prometheus_target_interval_length_seconds_count{interval="15s"} 12
|
||||
prometheus_target_interval_length_seconds{interval="1s",quantile="0.01"} 0
|
||||
prometheus_target_interval_length_seconds{interval="1s",quantile="0.05"} 0
|
||||
prometheus_target_interval_length_seconds{interval="1s",quantile="0.5"} 0
|
||||
prometheus_target_interval_length_seconds{interval="1s",quantile="0.9"} 1
|
||||
prometheus_target_interval_length_seconds{interval="1s",quantile="0.99"} 1
|
||||
prometheus_target_interval_length_seconds_sum{interval="1s"} 55
|
||||
prometheus_target_interval_length_seconds_count{interval="1s"} 117
|
BIN vendor/github.com/prometheus/common/expfmt/testdata/text.gz generated vendored Normal file
Binary file not shown.
89 vendor/github.com/prometheus/common/log/eventlog_formatter.go generated vendored Normal file
@@ -0,0 +1,89 @@
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build windows

package log

import (
    "fmt"
    "os"

    "golang.org/x/sys/windows/svc/eventlog"

    "github.com/sirupsen/logrus"
)

func init() {
    setEventlogFormatter = func(l logger, name string, debugAsInfo bool) error {
        if name == "" {
            return fmt.Errorf("missing name parameter")
        }

        fmter, err := newEventlogger(name, debugAsInfo, l.entry.Logger.Formatter)
        if err != nil {
            fmt.Fprintf(os.Stderr, "error creating eventlog formatter: %v\n", err)
            l.Errorf("can't connect logger to eventlog: %v", err)
            return err
        }
        l.entry.Logger.Formatter = fmter
        return nil
    }
}

type eventlogger struct {
    log         *eventlog.Log
    debugAsInfo bool
    wrap        logrus.Formatter
}

func newEventlogger(name string, debugAsInfo bool, fmter logrus.Formatter) (*eventlogger, error) {
    logHandle, err := eventlog.Open(name)
    if err != nil {
        return nil, err
    }
    return &eventlogger{log: logHandle, debugAsInfo: debugAsInfo, wrap: fmter}, nil
}

func (s *eventlogger) Format(e *logrus.Entry) ([]byte, error) {
    data, err := s.wrap.Format(e)
    if err != nil {
        fmt.Fprintf(os.Stderr, "eventlogger: can't format entry: %v\n", err)
        return data, err
    }

    switch e.Level {
    case logrus.PanicLevel:
        fallthrough
    case logrus.FatalLevel:
        fallthrough
    case logrus.ErrorLevel:
        err = s.log.Error(102, e.Message)
    case logrus.WarnLevel:
        err = s.log.Warning(101, e.Message)
    case logrus.InfoLevel:
        err = s.log.Info(100, e.Message)
    case logrus.DebugLevel:
        if s.debugAsInfo {
            err = s.log.Info(100, e.Message)
        }
    default:
        err = s.log.Info(100, e.Message)
    }

    if err != nil {
        fmt.Fprintf(os.Stderr, "eventlogger: can't send log to eventlog: %v\n", err)
    }

    return data, err
}
364 vendor/github.com/prometheus/common/log/log.go generated vendored Normal file
@@ -0,0 +1,364 @@
// Copyright 2015 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package log
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/url"
|
||||
"os"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
"gopkg.in/alecthomas/kingpin.v2"
|
||||
)
|
||||
|
||||
// setSyslogFormatter is nil if the target architecture does not support syslog.
|
||||
var setSyslogFormatter func(logger, string, string) error
|
||||
|
||||
// setEventlogFormatter is nil if the target OS does not support Eventlog (i.e., is not Windows).
|
||||
var setEventlogFormatter func(logger, string, bool) error
|
||||
|
||||
func setJSONFormatter() {
|
||||
origLogger.Formatter = &logrus.JSONFormatter{}
|
||||
}
|
||||
|
||||
type loggerSettings struct {
|
||||
level string
|
||||
format string
|
||||
}
|
||||
|
||||
func (s *loggerSettings) apply(ctx *kingpin.ParseContext) error {
|
||||
err := baseLogger.SetLevel(s.level)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = baseLogger.SetFormat(s.format)
|
||||
return err
|
||||
}
|
||||
|
||||
// AddFlags adds the flags used by this package to the Kingpin application.
|
||||
// To use the default Kingpin application, call AddFlags(kingpin.CommandLine)
|
||||
func AddFlags(a *kingpin.Application) {
|
||||
s := loggerSettings{}
|
||||
a.Flag("log.level", "Only log messages with the given severity or above. Valid levels: [debug, info, warn, error, fatal]").
|
||||
Default(origLogger.Level.String()).
|
||||
StringVar(&s.level)
|
||||
defaultFormat := url.URL{Scheme: "logger", Opaque: "stderr"}
|
||||
a.Flag("log.format", `Set the log target and format. Example: "logger:syslog?appname=bob&local=7" or "logger:stdout?json=true"`).
|
||||
Default(defaultFormat.String()).
|
||||
StringVar(&s.format)
|
||||
a.Action(s.apply)
|
||||
}
|
||||
|
||||
// Logger is the interface for loggers used in the Prometheus components.
|
||||
type Logger interface {
|
||||
Debug(...interface{})
|
||||
Debugln(...interface{})
|
||||
Debugf(string, ...interface{})
|
||||
|
||||
Info(...interface{})
|
||||
Infoln(...interface{})
|
||||
Infof(string, ...interface{})
|
||||
|
||||
Warn(...interface{})
|
||||
Warnln(...interface{})
|
||||
Warnf(string, ...interface{})
|
||||
|
||||
Error(...interface{})
|
||||
Errorln(...interface{})
|
||||
Errorf(string, ...interface{})
|
||||
|
||||
Fatal(...interface{})
|
||||
Fatalln(...interface{})
|
||||
Fatalf(string, ...interface{})
|
||||
|
||||
With(key string, value interface{}) Logger
|
||||
|
||||
SetFormat(string) error
|
||||
SetLevel(string) error
|
||||
}
|
||||
|
||||
type logger struct {
|
||||
entry *logrus.Entry
|
||||
}
|
||||
|
||||
func (l logger) With(key string, value interface{}) Logger {
|
||||
return logger{l.entry.WithField(key, value)}
|
||||
}
|
||||
|
||||
// Debug logs a message at level Debug on the standard logger.
|
||||
func (l logger) Debug(args ...interface{}) {
|
||||
l.sourced().Debug(args...)
|
||||
}
|
||||
|
||||
// Debug logs a message at level Debug on the standard logger.
|
||||
func (l logger) Debugln(args ...interface{}) {
|
||||
l.sourced().Debugln(args...)
|
||||
}
|
||||
|
||||
// Debugf logs a message at level Debug on the standard logger.
|
||||
func (l logger) Debugf(format string, args ...interface{}) {
|
||||
l.sourced().Debugf(format, args...)
|
||||
}
|
||||
|
||||
// Info logs a message at level Info on the standard logger.
|
||||
func (l logger) Info(args ...interface{}) {
|
||||
l.sourced().Info(args...)
|
||||
}
|
||||
|
||||
// Info logs a message at level Info on the standard logger.
|
||||
func (l logger) Infoln(args ...interface{}) {
|
||||
l.sourced().Infoln(args...)
|
||||
}
|
||||
|
||||
// Infof logs a message at level Info on the standard logger.
|
||||
func (l logger) Infof(format string, args ...interface{}) {
|
||||
l.sourced().Infof(format, args...)
|
||||
}
|
||||
|
||||
// Warn logs a message at level Warn on the standard logger.
|
||||
func (l logger) Warn(args ...interface{}) {
|
||||
l.sourced().Warn(args...)
|
||||
}
|
||||
|
||||
// Warn logs a message at level Warn on the standard logger.
|
||||
func (l logger) Warnln(args ...interface{}) {
|
||||
l.sourced().Warnln(args...)
|
||||
}
|
||||
|
||||
// Warnf logs a message at level Warn on the standard logger.
|
||||
func (l logger) Warnf(format string, args ...interface{}) {
|
||||
l.sourced().Warnf(format, args...)
|
||||
}
|
||||
|
||||
// Error logs a message at level Error on the standard logger.
|
||||
func (l logger) Error(args ...interface{}) {
|
||||
l.sourced().Error(args...)
|
||||
}
|
||||
|
||||
// Error logs a message at level Error on the standard logger.
|
||||
func (l logger) Errorln(args ...interface{}) {
|
||||
l.sourced().Errorln(args...)
|
||||
}
|
||||
|
||||
// Errorf logs a message at level Error on the standard logger.
|
||||
func (l logger) Errorf(format string, args ...interface{}) {
|
||||
l.sourced().Errorf(format, args...)
|
||||
}
|
||||
|
||||
// Fatal logs a message at level Fatal on the standard logger.
|
||||
func (l logger) Fatal(args ...interface{}) {
|
||||
l.sourced().Fatal(args...)
|
||||
}
|
||||
|
||||
// Fatal logs a message at level Fatal on the standard logger.
|
||||
func (l logger) Fatalln(args ...interface{}) {
|
||||
l.sourced().Fatalln(args...)
|
||||
}
|
||||
|
||||
// Fatalf logs a message at level Fatal on the standard logger.
|
||||
func (l logger) Fatalf(format string, args ...interface{}) {
|
||||
l.sourced().Fatalf(format, args...)
|
||||
}
|
||||
|
||||
func (l logger) SetLevel(level string) error {
|
||||
lvl, err := logrus.ParseLevel(level)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
l.entry.Logger.Level = lvl
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l logger) SetFormat(format string) error {
|
||||
u, err := url.Parse(format)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if u.Scheme != "logger" {
|
||||
return fmt.Errorf("invalid scheme %s", u.Scheme)
|
||||
}
|
||||
jsonq := u.Query().Get("json")
|
||||
if jsonq == "true" {
|
||||
setJSONFormatter()
|
||||
}
|
||||
|
||||
switch u.Opaque {
|
||||
case "syslog":
|
||||
if setSyslogFormatter == nil {
|
||||
return fmt.Errorf("system does not support syslog")
|
||||
}
|
||||
appname := u.Query().Get("appname")
|
||||
facility := u.Query().Get("local")
|
||||
return setSyslogFormatter(l, appname, facility)
|
||||
case "eventlog":
|
||||
if setEventlogFormatter == nil {
|
||||
return fmt.Errorf("system does not support eventlog")
|
||||
}
|
||||
name := u.Query().Get("name")
|
||||
debugAsInfo := false
|
||||
debugAsInfoRaw := u.Query().Get("debugAsInfo")
|
||||
if parsedDebugAsInfo, err := strconv.ParseBool(debugAsInfoRaw); err == nil {
|
||||
debugAsInfo = parsedDebugAsInfo
|
||||
}
|
||||
return setEventlogFormatter(l, name, debugAsInfo)
|
||||
case "stdout":
|
||||
l.entry.Logger.Out = os.Stdout
|
||||
case "stderr":
|
||||
l.entry.Logger.Out = os.Stderr
|
||||
default:
|
||||
return fmt.Errorf("unsupported logger %q", u.Opaque)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// sourced adds a source field to the logger that contains
|
||||
// the file name and line where the logging happened.
|
||||
func (l logger) sourced() *logrus.Entry {
|
||||
_, file, line, ok := runtime.Caller(2)
|
||||
if !ok {
|
||||
file = "<???>"
|
||||
line = 1
|
||||
} else {
|
||||
slash := strings.LastIndex(file, "/")
|
||||
file = file[slash+1:]
|
||||
}
|
||||
return l.entry.WithField("source", fmt.Sprintf("%s:%d", file, line))
|
||||
}
|
||||
|
||||
var origLogger = logrus.New()
|
||||
var baseLogger = logger{entry: logrus.NewEntry(origLogger)}
|
||||
|
||||
// Base returns the default Logger logging to
|
||||
func Base() Logger {
|
||||
return baseLogger
|
||||
}
|
||||
|
||||
// NewLogger returns a new Logger logging to out.
|
||||
func NewLogger(w io.Writer) Logger {
|
||||
l := logrus.New()
|
||||
l.Out = w
|
||||
return logger{entry: logrus.NewEntry(l)}
|
||||
}
|
||||
|
||||
// NewNopLogger returns a logger that discards all log messages.
|
||||
func NewNopLogger() Logger {
|
||||
l := logrus.New()
|
||||
l.Out = ioutil.Discard
|
||||
return logger{entry: logrus.NewEntry(l)}
|
||||
}
|
||||
|
||||
// With adds a field to the logger.
|
||||
func With(key string, value interface{}) Logger {
|
||||
return baseLogger.With(key, value)
|
||||
}
|
||||
|
||||
// Debug logs a message at level Debug on the standard logger.
|
||||
func Debug(args ...interface{}) {
|
||||
baseLogger.sourced().Debug(args...)
|
||||
}
|
||||
|
||||
// Debugln logs a message at level Debug on the standard logger.
|
||||
func Debugln(args ...interface{}) {
|
||||
baseLogger.sourced().Debugln(args...)
|
||||
}
|
||||
|
||||
// Debugf logs a message at level Debug on the standard logger.
|
||||
func Debugf(format string, args ...interface{}) {
|
||||
baseLogger.sourced().Debugf(format, args...)
|
||||
}
|
||||
|
||||
// Info logs a message at level Info on the standard logger.
|
||||
func Info(args ...interface{}) {
|
||||
baseLogger.sourced().Info(args...)
|
||||
}
|
||||
|
||||
// Infoln logs a message at level Info on the standard logger.
|
||||
func Infoln(args ...interface{}) {
|
||||
baseLogger.sourced().Infoln(args...)
|
||||
}
|
||||
|
||||
// Infof logs a message at level Info on the standard logger.
|
||||
func Infof(format string, args ...interface{}) {
|
||||
baseLogger.sourced().Infof(format, args...)
|
||||
}
|
||||
|
||||
// Warn logs a message at level Warn on the standard logger.
|
||||
func Warn(args ...interface{}) {
|
||||
baseLogger.sourced().Warn(args...)
|
||||
}
|
||||
|
||||
// Warnln logs a message at level Warn on the standard logger.
|
||||
func Warnln(args ...interface{}) {
|
||||
baseLogger.sourced().Warnln(args...)
|
||||
}
|
||||
|
||||
// Warnf logs a message at level Warn on the standard logger.
|
||||
func Warnf(format string, args ...interface{}) {
|
||||
baseLogger.sourced().Warnf(format, args...)
|
||||
}
|
||||
|
||||
// Error logs a message at level Error on the standard logger.
|
||||
func Error(args ...interface{}) {
|
||||
baseLogger.sourced().Error(args...)
|
||||
}
|
||||
|
||||
// Errorln logs a message at level Error on the standard logger.
|
||||
func Errorln(args ...interface{}) {
|
||||
baseLogger.sourced().Errorln(args...)
|
||||
}
|
||||
|
||||
// Errorf logs a message at level Error on the standard logger.
|
||||
func Errorf(format string, args ...interface{}) {
|
||||
baseLogger.sourced().Errorf(format, args...)
|
||||
}
|
||||
|
||||
// Fatal logs a message at level Fatal on the standard logger.
|
||||
func Fatal(args ...interface{}) {
|
||||
baseLogger.sourced().Fatal(args...)
|
||||
}
|
||||
|
||||
// Fatalln logs a message at level Fatal on the standard logger.
|
||||
func Fatalln(args ...interface{}) {
|
||||
baseLogger.sourced().Fatalln(args...)
|
||||
}
|
||||
|
||||
// Fatalf logs a message at level Fatal on the standard logger.
|
||||
func Fatalf(format string, args ...interface{}) {
|
||||
baseLogger.sourced().Fatalf(format, args...)
|
||||
}
|
||||
|
||||
// AddHook adds hook to Prometheus' original logger.
|
||||
func AddHook(hook logrus.Hook) {
|
||||
origLogger.Hooks.Add(hook)
|
||||
}
|
||||
|
||||
type errorLogWriter struct{}
|
||||
|
||||
func (errorLogWriter) Write(b []byte) (int, error) {
|
||||
baseLogger.sourced().Error(string(b))
|
||||
return len(b), nil
|
||||
}
|
||||
|
||||
// NewErrorLogger returns a log.Logger that is meant to be used
|
||||
// in the ErrorLog field of an http.Server to log HTTP server errors.
|
||||
func NewErrorLogger() *log.Logger {
|
||||
return log.New(&errorLogWriter{}, "", 0)
|
||||
}
|
39 vendor/github.com/prometheus/common/log/log_test.go generated vendored Normal file
@@ -0,0 +1,39 @@
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package log

import (
    "bytes"
    "regexp"
    "testing"

    "github.com/sirupsen/logrus"
)

func TestFileLineLogging(t *testing.T) {
    var buf bytes.Buffer
    origLogger.Out = &buf
    origLogger.Formatter = &logrus.TextFormatter{
        DisableColors: true,
    }

    // The default logging level should be "info".
    Debug("This debug-level line should not show up in the output.")
    Infof("This %s-level line should show up in the output.", "info")

    re := `^time=".*" level=info msg="This info-level line should show up in the output." source="log_test.go:33"\n$`
    if !regexp.MustCompile(re).Match(buf.Bytes()) {
        t.Fatalf("%q did not match expected regex %q", buf.String(), re)
    }
}
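For context, the log package vendored above is driven from an application's main package roughly as follows. This is an illustrative sketch, not part of the diff: the component name, field value, and address are made up, while AddFlags, With, Infof, and Debugln are the APIs shown in log.go above.

package main

import (
    "github.com/prometheus/common/log"
    kingpin "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    // Register --log.level and --log.format on the default kingpin application.
    log.AddFlags(kingpin.CommandLine)
    kingpin.Parse()

    // Derive a logger carrying a fixed field; "example" is a made-up value.
    logger := log.With("component", "example")
    logger.Infof("listening on %s", ":9090")

    // Hidden at the default "info" level.
    log.Debugln("this line is only emitted when --log.level=debug")
}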
126 vendor/github.com/prometheus/common/log/syslog_formatter.go generated vendored Normal file
@@ -0,0 +1,126 @@
// Copyright 2015 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// +build !windows,!nacl,!plan9
|
||||
|
||||
package log
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log/syslog"
|
||||
"os"
|
||||
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var _ logrus.Formatter = (*syslogger)(nil)
|
||||
|
||||
func init() {
|
||||
setSyslogFormatter = func(l logger, appname, local string) error {
|
||||
if appname == "" {
|
||||
return fmt.Errorf("missing appname parameter")
|
||||
}
|
||||
if local == "" {
|
||||
return fmt.Errorf("missing local parameter")
|
||||
}
|
||||
|
||||
fmter, err := newSyslogger(appname, local, l.entry.Logger.Formatter)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error creating syslog formatter: %v\n", err)
|
||||
l.entry.Errorf("can't connect logger to syslog: %v", err)
|
||||
return err
|
||||
}
|
||||
l.entry.Logger.Formatter = fmter
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
var prefixTag []byte
|
||||
|
||||
type syslogger struct {
|
||||
wrap logrus.Formatter
|
||||
out *syslog.Writer
|
||||
}
|
||||
|
||||
func newSyslogger(appname string, facility string, fmter logrus.Formatter) (*syslogger, error) {
|
||||
priority, err := getFacility(facility)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out, err := syslog.New(priority, appname)
|
||||
_, isJSON := fmter.(*logrus.JSONFormatter)
|
||||
if isJSON {
|
||||
// add cee tag to json formatted syslogs
|
||||
prefixTag = []byte("@cee:")
|
||||
}
|
||||
return &syslogger{
|
||||
out: out,
|
||||
wrap: fmter,
|
||||
}, err
|
||||
}
|
||||
|
||||
func getFacility(facility string) (syslog.Priority, error) {
|
||||
switch facility {
|
||||
case "0":
|
||||
return syslog.LOG_LOCAL0, nil
|
||||
case "1":
|
||||
return syslog.LOG_LOCAL1, nil
|
||||
case "2":
|
||||
return syslog.LOG_LOCAL2, nil
|
||||
case "3":
|
||||
return syslog.LOG_LOCAL3, nil
|
||||
case "4":
|
||||
return syslog.LOG_LOCAL4, nil
|
||||
case "5":
|
||||
return syslog.LOG_LOCAL5, nil
|
||||
case "6":
|
||||
return syslog.LOG_LOCAL6, nil
|
||||
case "7":
|
||||
return syslog.LOG_LOCAL7, nil
|
||||
}
|
||||
return syslog.LOG_LOCAL0, fmt.Errorf("invalid local(%s) for syslog", facility)
|
||||
}
|
||||
|
||||
func (s *syslogger) Format(e *logrus.Entry) ([]byte, error) {
|
||||
data, err := s.wrap.Format(e)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "syslogger: can't format entry: %v\n", err)
|
||||
return data, err
|
||||
}
|
||||
// only append tag to data sent to syslog (line), not to what
|
||||
// is returned
|
||||
line := string(append(prefixTag, data...))
|
||||
|
||||
switch e.Level {
|
||||
case logrus.PanicLevel:
|
||||
err = s.out.Crit(line)
|
||||
case logrus.FatalLevel:
|
||||
err = s.out.Crit(line)
|
||||
case logrus.ErrorLevel:
|
||||
err = s.out.Err(line)
|
||||
case logrus.WarnLevel:
|
||||
err = s.out.Warning(line)
|
||||
case logrus.InfoLevel:
|
||||
err = s.out.Info(line)
|
||||
case logrus.DebugLevel:
|
||||
err = s.out.Debug(line)
|
||||
default:
|
||||
err = s.out.Notice(line)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "syslogger: can't send log to syslog: %v\n", err)
|
||||
}
|
||||
|
||||
return data, err
|
||||
}
|
52 vendor/github.com/prometheus/common/log/syslog_formatter_test.go generated vendored Normal file
@@ -0,0 +1,52 @@
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build !windows,!nacl,!plan9

package log

import (
    "errors"
    "log/syslog"
    "testing"
)

func TestGetFacility(t *testing.T) {
    testCases := []struct {
        facility         string
        expectedPriority syslog.Priority
        expectedErr      error
    }{
        {"0", syslog.LOG_LOCAL0, nil},
        {"1", syslog.LOG_LOCAL1, nil},
        {"2", syslog.LOG_LOCAL2, nil},
        {"3", syslog.LOG_LOCAL3, nil},
        {"4", syslog.LOG_LOCAL4, nil},
        {"5", syslog.LOG_LOCAL5, nil},
        {"6", syslog.LOG_LOCAL6, nil},
        {"7", syslog.LOG_LOCAL7, nil},
        {"8", syslog.LOG_LOCAL0, errors.New("invalid local(8) for syslog")},
    }
    for _, tc := range testCases {
        priority, err := getFacility(tc.facility)
        if err != tc.expectedErr {
            if err.Error() != tc.expectedErr.Error() {
                t.Errorf("want %s, got %s", tc.expectedErr.Error(), err.Error())
            }
        }

        if priority != tc.expectedPriority {
            t.Errorf("want %q, got %q", tc.expectedPriority, priority)
        }
    }
}
33
vendor/github.com/prometheus/common/promlog/flag/flag.go
generated
vendored
Normal file
33
vendor/github.com/prometheus/common/promlog/flag/flag.go
generated
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package flag

import (
	"github.com/prometheus/common/promlog"
	kingpin "gopkg.in/alecthomas/kingpin.v2"
)

// LevelFlagName is the canonical flag name to configure the allowed log level
// within Prometheus projects.
const LevelFlagName = "log.level"

// LevelFlagHelp is the help description for the log.level flag.
const LevelFlagHelp = "Only log messages with the given severity or above. One of: [debug, info, warn, error]"

// AddFlags adds the flags used by this package to the Kingpin application.
// To use the default Kingpin application, call AddFlags(kingpin.CommandLine)
func AddFlags(a *kingpin.Application, logLevel *promlog.AllowedLevel) {
	a.Flag(LevelFlagName, LevelFlagHelp).
		Default("info").SetValue(logLevel)
}
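A minimal usage sketch, not part of the vendored diff, of how a main package might register this flag with Kingpin; the application name "example" and the hard-coded argument list are hypothetical.

// Usage sketch (not part of the vendored file); the "example" app name is hypothetical.
package main

import (
	"fmt"

	"github.com/prometheus/common/promlog"
	promlogflag "github.com/prometheus/common/promlog/flag"
	kingpin "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
	var logLevel promlog.AllowedLevel
	app := kingpin.New("example", "demo of promlog flag registration")
	// Registers --log.level with a default of "info"; parsing calls AllowedLevel.Set.
	promlogflag.AddFlags(app, &logLevel)
	if _, err := app.Parse([]string{"--log.level=debug"}); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("selected level:", logLevel.String())
}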
63
vendor/github.com/prometheus/common/promlog/log.go
generated
vendored
Normal file
@ -0,0 +1,63 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package promlog defines standardised ways to initialize Go kit loggers
// across Prometheus components.
// It should typically only ever be imported by main packages.
package promlog

import (
	"os"

	"github.com/go-kit/kit/log"
	"github.com/go-kit/kit/log/level"
	"github.com/pkg/errors"
)

// AllowedLevel is a settable identifier for the minimum level a log entry
// must be have.
type AllowedLevel struct {
	s string
	o level.Option
}

func (l *AllowedLevel) String() string {
	return l.s
}

// Set updates the value of the allowed level.
func (l *AllowedLevel) Set(s string) error {
	switch s {
	case "debug":
		l.o = level.AllowDebug()
	case "info":
		l.o = level.AllowInfo()
	case "warn":
		l.o = level.AllowWarn()
	case "error":
		l.o = level.AllowError()
	default:
		return errors.Errorf("unrecognized log level %q", s)
	}
	l.s = s
	return nil
}

// New returns a new leveled oklog logger in the logfmt format. Each logged line will be annotated
// with a timestamp. The output always goes to stderr.
func New(al AllowedLevel) log.Logger {
	l := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
	l = level.NewFilter(l, al.o)
	l = log.With(l, "ts", log.DefaultTimestampUTC, "caller", log.DefaultCaller)
	return l
}
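A short sketch, again not part of the diff, of constructing a filtered go-kit logger with New; the chosen level and messages are illustrative only.

// Usage sketch (not part of the vendored file).
package main

import (
	"github.com/go-kit/kit/log/level"
	"github.com/prometheus/common/promlog"
)

func main() {
	var al promlog.AllowedLevel
	if err := al.Set("warn"); err != nil { // accepts debug, info, warn, error
		panic(err)
	}
	logger := promlog.New(al) // logfmt to stderr, filtered at "warn", annotated with ts and caller
	level.Info(logger).Log("msg", "dropped by the level filter")
	level.Error(logger).Log("msg", "this one is emitted")
}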
110
vendor/github.com/prometheus/common/route/route.go
generated
vendored
Normal file
@ -0,0 +1,110 @@
package route

import (
	"net/http"

	"github.com/julienschmidt/httprouter"
	"golang.org/x/net/context"
)

type param string

// Param returns param p for the context.
func Param(ctx context.Context, p string) string {
	return ctx.Value(param(p)).(string)
}

// WithParam returns a new context with param p set to v.
func WithParam(ctx context.Context, p, v string) context.Context {
	return context.WithValue(ctx, param(p), v)
}

// Router wraps httprouter.Router and adds support for prefixed sub-routers,
// per-request context injections and instrumentation.
type Router struct {
	rtr    *httprouter.Router
	prefix string
	instrh func(handlerName string, handler http.HandlerFunc) http.HandlerFunc
}

// New returns a new Router.
func New() *Router {
	return &Router{
		rtr: httprouter.New(),
	}
}

// WithInstrumentation returns a router with instrumentation support.
func (r *Router) WithInstrumentation(instrh func(handlerName string, handler http.HandlerFunc) http.HandlerFunc) *Router {
	return &Router{rtr: r.rtr, prefix: r.prefix, instrh: instrh}
}

// WithPrefix returns a router that prefixes all registered routes with prefix.
func (r *Router) WithPrefix(prefix string) *Router {
	return &Router{rtr: r.rtr, prefix: r.prefix + prefix, instrh: r.instrh}
}

// handle turns a HandlerFunc into an httprouter.Handle.
func (r *Router) handle(handlerName string, h http.HandlerFunc) httprouter.Handle {
	if r.instrh != nil {
		// This needs to be outside the closure to avoid data race when reading and writing to 'h'.
		h = r.instrh(handlerName, h)
	}
	return func(w http.ResponseWriter, req *http.Request, params httprouter.Params) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()

		for _, p := range params {
			ctx = context.WithValue(ctx, param(p.Key), p.Value)
		}
		h(w, req.WithContext(ctx))
	}
}

// Get registers a new GET route.
func (r *Router) Get(path string, h http.HandlerFunc) {
	r.rtr.GET(r.prefix+path, r.handle(path, h))
}

// Options registers a new OPTIONS route.
func (r *Router) Options(path string, h http.HandlerFunc) {
	r.rtr.OPTIONS(r.prefix+path, r.handle(path, h))
}

// Del registers a new DELETE route.
func (r *Router) Del(path string, h http.HandlerFunc) {
	r.rtr.DELETE(r.prefix+path, r.handle(path, h))
}

// Put registers a new PUT route.
func (r *Router) Put(path string, h http.HandlerFunc) {
	r.rtr.PUT(r.prefix+path, r.handle(path, h))
}

// Post registers a new POST route.
func (r *Router) Post(path string, h http.HandlerFunc) {
	r.rtr.POST(r.prefix+path, r.handle(path, h))
}

// Redirect takes an absolute path and sends an internal HTTP redirect for it,
// prefixed by the router's path prefix. Note that this method does not include
// functionality for handling relative paths or full URL redirects.
func (r *Router) Redirect(w http.ResponseWriter, req *http.Request, path string, code int) {
	http.Redirect(w, req, r.prefix+path, code)
}

// ServeHTTP implements http.Handler.
func (r *Router) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	r.rtr.ServeHTTP(w, req)
}

// FileServe returns a new http.HandlerFunc that serves files from dir.
// Using routes must provide the *filepath parameter.
func FileServe(dir string) http.HandlerFunc {
	fs := http.FileServer(http.Dir(dir))

	return func(w http.ResponseWriter, r *http.Request) {
		r.URL.Path = Param(r.Context(), "filepath")
		fs.ServeHTTP(w, r)
	}
}
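A brief sketch, not part of the diff, of registering a parameterized route and reading the path parameter back through the context helper; the route path, prefix, and listen address are illustrative.

// Usage sketch (not part of the vendored file).
package main

import (
	"fmt"
	"net/http"

	"github.com/prometheus/common/route"
)

func main() {
	router := route.New().WithPrefix("/api/v1")
	router.Get("/targets/:job", func(w http.ResponseWriter, r *http.Request) {
		// Path parameters are injected into the request context by handle().
		job := route.Param(r.Context(), "job")
		fmt.Fprintf(w, "job=%s\n", job)
	})
	// Served at e.g. GET /api/v1/targets/node; the listen address is illustrative.
	http.ListenAndServe(":8080", router)
}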
76
vendor/github.com/prometheus/common/route/route_test.go
generated
vendored
Normal file
@ -0,0 +1,76 @@
package route

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

func TestRedirect(t *testing.T) {
	router := New().WithPrefix("/test/prefix")
	w := httptest.NewRecorder()
	r, err := http.NewRequest("GET", "http://localhost:9090/foo", nil)
	if err != nil {
		t.Fatalf("Error building test request: %s", err)
	}

	router.Redirect(w, r, "/some/endpoint", http.StatusFound)
	if w.Code != http.StatusFound {
		t.Fatalf("Unexpected redirect status code: got %d, want %d", w.Code, http.StatusFound)
	}

	want := "/test/prefix/some/endpoint"
	got := w.Header()["Location"][0]
	if want != got {
		t.Fatalf("Unexpected redirect location: got %s, want %s", got, want)
	}
}

func TestContext(t *testing.T) {
	router := New()
	router.Get("/test/:foo/", func(w http.ResponseWriter, r *http.Request) {
		want := "bar"
		got := Param(r.Context(), "foo")
		if want != got {
			t.Fatalf("Unexpected context value: want %q, got %q", want, got)
		}
	})

	r, err := http.NewRequest("GET", "http://localhost:9090/test/bar/", nil)
	if err != nil {
		t.Fatalf("Error building test request: %s", err)
	}
	router.ServeHTTP(nil, r)
}

func TestInstrumentation(t *testing.T) {
	var got string
	cases := []struct {
		router *Router
		want   string
	}{
		{
			router: New(),
			want:   "",
		}, {
			router: New().WithInstrumentation(func(handlerName string, handler http.HandlerFunc) http.HandlerFunc {
				got = handlerName
				return handler
			}),
			want: "/foo",
		},
	}

	for _, c := range cases {
		c.router.Get("/foo", func(w http.ResponseWriter, r *http.Request) {})

		r, err := http.NewRequest("GET", "http://localhost:9090/foo", nil)
		if err != nil {
			t.Fatalf("Error building test request: %s", err)
		}
		c.router.ServeHTTP(nil, r)
		if c.want != got {
			t.Fatalf("Unexpected value: want %q, got %q", c.want, got)
		}
	}
}
89
vendor/github.com/prometheus/common/version/info.go
generated
vendored
Normal file
@ -0,0 +1,89 @@
// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package version

import (
	"bytes"
	"fmt"
	"runtime"
	"strings"
	"text/template"

	"github.com/prometheus/client_golang/prometheus"
)

// Build information. Populated at build-time.
var (
	Version   string
	Revision  string
	Branch    string
	BuildUser string
	BuildDate string
	GoVersion = runtime.Version()
)

// NewCollector returns a collector which exports metrics about current version information.
func NewCollector(program string) *prometheus.GaugeVec {
	buildInfo := prometheus.NewGaugeVec(
		prometheus.GaugeOpts{
			Namespace: program,
			Name:      "build_info",
			Help: fmt.Sprintf(
				"A metric with a constant '1' value labeled by version, revision, branch, and goversion from which %s was built.",
				program,
			),
		},
		[]string{"version", "revision", "branch", "goversion"},
	)
	buildInfo.WithLabelValues(Version, Revision, Branch, GoVersion).Set(1)
	return buildInfo
}

// versionInfoTmpl contains the template used by Info.
var versionInfoTmpl = `
{{.program}}, version {{.version}} (branch: {{.branch}}, revision: {{.revision}})
  build user: {{.buildUser}}
  build date: {{.buildDate}}
  go version: {{.goVersion}}
`

// Print returns version information.
func Print(program string) string {
	m := map[string]string{
		"program":   program,
		"version":   Version,
		"revision":  Revision,
		"branch":    Branch,
		"buildUser": BuildUser,
		"buildDate": BuildDate,
		"goVersion": GoVersion,
	}
	t := template.Must(template.New("version").Parse(versionInfoTmpl))

	var buf bytes.Buffer
	if err := t.ExecuteTemplate(&buf, "version", m); err != nil {
		panic(err)
	}
	return strings.TrimSpace(buf.String())
}

// Info returns version, branch and revision information.
func Info() string {
	return fmt.Sprintf("(version=%s, branch=%s, revision=%s)", Version, Branch, Revision)
}

// BuildContext returns goVersion, buildUser and buildDate information.
func BuildContext() string {
	return fmt.Sprintf("(go=%s, user=%s, date=%s)", GoVersion, BuildUser, BuildDate)
}
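Finally, a sketch, not part of the diff, of how a binary might use the version package; the "example" program name is hypothetical, and the ldflags line in the comment shows one common way the package-level variables get populated at build time.

// Usage sketch (not part of the vendored file). The build-time variables are
// commonly injected with something like:
//   go build -ldflags "-X github.com/prometheus/common/version.Version=1.0.0"
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/common/version"
)

func main() {
	fmt.Println(version.Print("example"))        // multi-line, human-readable report
	fmt.Println("example", version.Info())       // (version=..., branch=..., revision=...)
	fmt.Println("build context", version.BuildContext())

	// Exposes example_build_info{version,revision,branch,goversion} = 1 on the default registry.
	prometheus.MustRegister(version.NewCollector("example"))
}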